From de92c4e1f8ca42badc9167613b85c3913df425a7 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 3 Sep 2020 16:15:23 +0100 Subject: [PATCH 01/86] CI: static analysis with np-dev --- .github/workflows/ci.yml | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 149acef72db26..dbccc50db47c3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -142,3 +142,40 @@ jobs: - name: Upload dev docs run: rsync -az --delete doc/build/html/ docs@${{ secrets.server_ip }}:/usr/share/nginx/pandas/pandas-docs/dev if: github.event_name == 'push' + + numpy-types: + name: Numpy Type Annotations + runs-on: ubuntu-latest + steps: + - name: Update for build environment + run: | + sudo apt-get update + sudo apt-get install -y build-essential + sudo apt-get clean + - name: Setting conda path + run: echo ::add-path::$CONDA/bin + - name: Update conda + run: | + conda config --set quiet true --set always_yes true + conda update -n base -c defaults conda + conda list + - name: Checkout pandas + uses: actions/checkout@v2 + - name: Update conda environment + run: | + conda env update -n pandas-dev --file=environment.yml + conda list + - name: Update numpy using wheel from nightly + run: | + source activate pandas-dev + pip install --extra-index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple --pre numpy + conda list + - name: Remove pandas + run: | + source activate pandas-dev + conda uninstall -y --force pandas || true + conda list + - name: Typing validation + run: | + source activate pandas-dev + ci/code_checks.sh typing From 3ff2a1329c1e76456a1e2f0180af3e4a735d3d92 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 3 Sep 2020 16:19:16 +0100 Subject: [PATCH 02/86] empty From da38df6ae8012c1225122004dc52de7edf9b6738 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 3 Sep 2020 16:20:29 +0100 Subject: [PATCH 03/86] change branch for ci --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dbccc50db47c3..54c7386d65348 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,7 +2,7 @@ name: CI on: push: - branches: master + branches: numpy-types pull_request: branches: - master From 960fc4fa712dee06658b70dcde546257ee34ecf7 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 3 Sep 2020 16:30:19 +0100 Subject: [PATCH 04/86] upgrade flag to pip --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 54c7386d65348..8c59f54fa65ba 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -168,7 +168,7 @@ jobs: - name: Update numpy using wheel from nightly run: | source activate pandas-dev - pip install --extra-index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple --pre numpy + pip install -U --extra-index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple --pre numpy conda list - name: Remove pandas run: | From 93e1499e79a9ace49037aafd4194a86b4e75cdf5 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 3 Sep 2020 16:41:05 +0100 Subject: [PATCH 05/86] continue-on-error --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8c59f54fa65ba..bd412a61502ec 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -146,6 +146,7 @@ jobs: numpy-types: name: 
Numpy Type Annotations runs-on: ubuntu-latest + continue-on-error: true steps: - name: Update for build environment run: | From 70f0469c7a6241e2cf33a2f867f58f57a43306a9 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 3 Sep 2020 16:51:08 +0100 Subject: [PATCH 06/86] Revert "change branch for ci" This reverts commit da38df6ae8012c1225122004dc52de7edf9b6738. --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bd412a61502ec..1e4cf66bf1422 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,7 +2,7 @@ name: CI on: push: - branches: numpy-types + branches: master pull_request: branches: - master From f2ae4dbdda6c1b64a625ad832fcf80df8f10ea12 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 10:52:32 +0100 Subject: [PATCH 07/86] wip --- pandas/compat/numpy/__init__.py | 4 +++- pandas/core/dtypes/cast.py | 6 +++++- pandas/core/dtypes/common.py | 9 +++++++-- pandas/core/dtypes/dtypes.py | 16 ++++++++++++---- pandas/core/dtypes/missing.py | 4 +++- pandas/core/ops/mask_ops.py | 4 +++- pandas/plotting/_matplotlib/tools.py | 11 ++++++++--- 7 files changed, 41 insertions(+), 13 deletions(-) diff --git a/pandas/compat/numpy/__init__.py b/pandas/compat/numpy/__init__.py index a2444b7ba5a0d..733cf3075d4db 100644 --- a/pandas/compat/numpy/__init__.py +++ b/pandas/compat/numpy/__init__.py @@ -43,7 +43,9 @@ def np_datetime64_compat(s, *args, **kwargs): warning, when need to pass '2015-01-01 09:00:00' """ s = tz_replacer(s) - return np.datetime64(s, *args, **kwargs) + # error: No overload variant of "datetime64" matches argument types "Any", + # "Tuple[Any, ...]", "Dict[str, Any]" + return np.datetime64(s, *args, **kwargs) # type: ignore[call-overload] def np_array_datetime64_compat(arr, *args, **kwargs): diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index 3aa1317f6db6d..70e218d8431ca 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -1080,7 +1080,11 @@ def soft_convert_objects( if datetime: from pandas import to_datetime - return to_datetime(values, errors="coerce").to_numpy() + # error: No overload variant of "to_datetime" matches argument + # types "ndarray", "str" + return to_datetime( # type: ignore[call-overload] + values, errors="coerce" + ).to_numpy() elif timedelta: from pandas import to_timedelta diff --git a/pandas/core/dtypes/common.py b/pandas/core/dtypes/common.py index 14184f044ae95..fc4077ff74295 100644 --- a/pandas/core/dtypes/common.py +++ b/pandas/core/dtypes/common.py @@ -150,7 +150,10 @@ def ensure_int_or_float(arr: ArrayLike, copy: bool = False) -> np.ndarray: return arr.astype("uint64", copy=copy, casting="safe") # type: ignore[call-arg] except TypeError: if is_extension_array_dtype(arr.dtype): - return arr.to_numpy(dtype="float64", na_value=np.nan) + # error: "ndarray" has no attribute "to_numpy" + return arr.to_numpy( # type: ignore[attr-defined] + dtype="float64", na_value=np.nan + ) return arr.astype("float64", copy=copy) @@ -1791,7 +1794,9 @@ def pandas_dtype(dtype) -> DtypeObj: # registered extension types result = registry.find(dtype) if result is not None: - return result + # error: Incompatible return value type (got "Type[ExtensionDtype]", + # expected "Union[dtype, ExtensionDtype]") + return result # type: ignore[return-value] # try a numpy dtype # raise a consistent TypeError if failed diff --git a/pandas/core/dtypes/dtypes.py b/pandas/core/dtypes/dtypes.py index 
bf8d50db8416e..2cc73ca44f0ff 100644 --- a/pandas/core/dtypes/dtypes.py +++ b/pandas/core/dtypes/dtypes.py @@ -152,7 +152,9 @@ class CategoricalDtype(PandasExtensionDtype, ExtensionDtype): type: Type[CategoricalDtypeType] = CategoricalDtypeType kind: str_type = "O" str = "|O08" - base = np.dtype("O") + # error: Incompatible types in assignment (expression has type "dtype", + # base class "PandasExtensionDtype" defined the type as "None") + base = np.dtype("O") # type: ignore[assignment] _metadata = ("categories", "ordered") _cache: Dict[str_type, PandasExtensionDtype] = {} @@ -641,7 +643,9 @@ class DatetimeTZDtype(PandasExtensionDtype): kind: str_type = "M" str = "|M8[ns]" num = 101 - base = np.dtype("M8[ns]") + # error: Incompatible types in assignment (expression has type "dtype", + # base class "PandasExtensionDtype" defined the type as "None") + base = np.dtype("M8[ns]") # type: ignore[assignment] na_value = NaT _metadata = ("unit", "tz") _match = re.compile(r"(datetime64|M8)\[(?P.+), (?P.+)\]") @@ -807,7 +811,9 @@ class PeriodDtype(dtypes.PeriodDtypeBase, PandasExtensionDtype): type: Type[Period] = Period kind: str_type = "O" str = "|O08" - base = np.dtype("O") + # error: Incompatible types in assignment (expression has type "dtype", + # base class "PandasExtensionDtype" defined the type as "None") + base = np.dtype("O") # type: ignore[assignment] num = 102 _metadata = ("freq",) _match = re.compile(r"(P|p)eriod\[(?P.+)\]") @@ -1004,7 +1010,9 @@ class IntervalDtype(PandasExtensionDtype): name = "interval" kind: str_type = "O" str = "|O08" - base = np.dtype("O") + # error: Incompatible types in assignment (expression has type "dtype", + # base class "PandasExtensionDtype" defined the type as "None") + base = np.dtype("O") # type: ignore[assignment] num = 103 _metadata = ("subtype",) _match = re.compile(r"(I|i)nterval\[(?P.+)\]") diff --git a/pandas/core/dtypes/missing.py b/pandas/core/dtypes/missing.py index 0b4aab0ac9d88..78902b1ca7ba6 100644 --- a/pandas/core/dtypes/missing.py +++ b/pandas/core/dtypes/missing.py @@ -632,7 +632,9 @@ def isna_all(arr: ArrayLike) -> bool: checker = nan_checker elif dtype.kind in ["m", "M"] or dtype.type is Period: - checker = lambda x: np.asarray(x.view("i8")) == iNaT + # error: Incompatible types in assignment (expression has type + # "Callable[[Any], Any]", variable has type "ufunc") + checker = lambda x: np.asarray(x.view("i8")) == iNaT # type: ignore[assignment] else: checker = lambda x: _isna_ndarraylike(x, inf_as_na=INF_AS_NA) diff --git a/pandas/core/ops/mask_ops.py b/pandas/core/ops/mask_ops.py index 8fb81faf313d7..092c7a1260cdc 100644 --- a/pandas/core/ops/mask_ops.py +++ b/pandas/core/ops/mask_ops.py @@ -103,7 +103,9 @@ def kleene_xor( if right is libmissing.NA: result = np.zeros_like(left) else: - result = left ^ right + # error: Incompatible types in assignment (expression has type + # "Union[bool, Any]", variable has type "ndarray") + result = left ^ right # type: ignore[assignment] if right_mask is None: if right is libmissing.NA: diff --git a/pandas/plotting/_matplotlib/tools.py b/pandas/plotting/_matplotlib/tools.py index aed0c360fc7ce..6b74c86036286 100644 --- a/pandas/plotting/_matplotlib/tools.py +++ b/pandas/plotting/_matplotlib/tools.py @@ -403,10 +403,15 @@ def handle_shared_axes( def flatten_axes(axes: Union["Axes", Sequence["Axes"]]) -> Sequence["Axes"]: if not is_list_like(axes): - return np.array([axes]) + # error: Incompatible return value type (got "ndarray", expected + # "Sequence[Any]") + return np.array([axes]) # type: 
ignore[return-value] elif isinstance(axes, (np.ndarray, ABCIndexClass)): - return axes.ravel() - return np.array(axes) + # error: Incompatible return value type (got "Union[ndarray, Any]", + # expected "Sequence[Any]") + return axes.ravel() # type: ignore[return-value] + # error: Incompatible return value type (got "ndarray", expected "Sequence[Any]") + return np.array(axes) # type: ignore[return-value] def set_ticks_props( From 9c332b7187813bd28d329ca80420bcd08aa79dc0 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 12:53:01 +0100 Subject: [PATCH 08/86] wip --- pandas/core/algorithms.py | 51 +++++++++++++++++++++++------ pandas/core/array_algos/replace.py | 4 ++- pandas/core/common.py | 12 ++++++- pandas/core/construction.py | 7 +++- pandas/core/dtypes/cast.py | 28 +++++++++++++--- pandas/core/dtypes/missing.py | 6 +++- pandas/core/nanops.py | 27 +++++++++++---- pandas/core/ops/array_ops.py | 14 ++++++-- pandas/core/strings/object_array.py | 4 ++- 9 files changed, 124 insertions(+), 29 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index d2005d46bbbf1..2b257407bea89 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -194,20 +194,37 @@ def _reconstruct_data( ExtensionArray or np.ndarray """ if is_extension_array_dtype(dtype): - values = dtype.construct_array_type()._from_sequence(values) + # error: Item "dtype" of "Union[dtype, ExtensionDtype]" has no + # attribute "construct_array_type" + tmp = dtype.construct_array_type() # type: ignore[union-attr] + values = tmp._from_sequence(values) elif is_bool_dtype(dtype): - values = values.astype(dtype, copy=False) + # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has + # incompatible type "Union[dtype, ExtensionDtype]"; expected + # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, + # Tuple[Any, Any]]" + values = values.astype(dtype, copy=False) # type: ignore[arg-type] # we only support object dtypes bool Index if isinstance(original, ABCIndexClass): values = values.astype(object, copy=False) elif dtype is not None: if is_datetime64_dtype(dtype): - dtype = "datetime64[ns]" + # error: Incompatible types in assignment (expression has type + # "str", variable has type "Union[dtype, ExtensionDtype]") + dtype = "datetime64[ns]" # type: ignore[assignment] elif is_timedelta64_dtype(dtype): - dtype = "timedelta64[ns]" + # error: Incompatible types in assignment (expression has type + # "str", variable has type "Union[dtype, ExtensionDtype]") + dtype = "timedelta64[ns]" # type: ignore[assignment] - values = values.astype(dtype, copy=False) + # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has + # incompatible type "Union[dtype, ExtensionDtype]"; expected + # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, + # Tuple[Any, Any]]" + values = values.astype(dtype, copy=False) # type: ignore[arg-type] return values @@ -421,7 +438,20 @@ def isin(comps: AnyArrayLike, values: AnyArrayLike) -> np.ndarray: ) if not isinstance(values, (ABCIndex, ABCSeries, ABCExtensionArray, np.ndarray)): - values = construct_1d_object_array_from_listlike(list(values)) + # pandas\core\algorithms.py:424: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type + # "ExtensionArray") [assignment] + + # pandas\core\algorithms.py:424: error: Incompatible types in + # assignment (expression has type 
"ndarray", variable has type "Index") + # [assignment] + + # pandas\core\algorithms.py:424: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type + # "Series") [assignment] + values = construct_1d_object_array_from_listlike( # type: ignore[assignment] + list(values) + ) # TODO: could use ensure_arraylike here comps = extract_array(comps, extract_numpy=True) @@ -1022,10 +1052,11 @@ def checked_add_with_arr(arr, b, arr_mask=None, b_mask=None): to_raise = ((np.iinfo(np.int64).max - b2 < arr) & not_nan).any() else: to_raise = ( - (np.iinfo(np.int64).max - b2[mask1] < arr[mask1]) & not_nan[mask1] - ).any() or ( - (np.iinfo(np.int64).min - b2[mask2] > arr[mask2]) & not_nan[mask2] - ).any() + ((np.iinfo(np.int64).max - b2[mask1] < arr[mask1]) & not_nan[mask1]).any() + or ( + (np.iinfo(np.int64).min - b2[mask2] > arr[mask2]) & not_nan[mask2] + ).any() + ) if to_raise: raise OverflowError("Overflow in int64 addition") diff --git a/pandas/core/array_algos/replace.py b/pandas/core/array_algos/replace.py index 9eaa265adab2b..70801a6535e0f 100644 --- a/pandas/core/array_algos/replace.py +++ b/pandas/core/array_algos/replace.py @@ -69,7 +69,9 @@ def _check_comparison_types( if is_numeric_v_string_like(a, b): # GH#29553 avoid deprecation warnings from numpy - return np.zeros(a.shape, dtype=bool) + # error: Incompatible return value type (got "ndarray", expected + # "Union[ExtensionArray, bool]") + return np.zeros(a.shape, dtype=bool) # type: ignore[return-value] elif is_datetimelike_v_numeric(a, b): # GH#29553 avoid deprecation warnings from numpy diff --git a/pandas/core/common.py b/pandas/core/common.py index b860c83f89cbc..ac73141025147 100644 --- a/pandas/core/common.py +++ b/pandas/core/common.py @@ -500,7 +500,17 @@ def convert_to_list_like( inputs are returned unmodified whereas others are converted to list. """ if isinstance(values, (list, np.ndarray, ABCIndex, ABCSeries, ABCExtensionArray)): - return values + # pandas\core\common.py:503: error: Incompatible return value type (got + # "Union[Any, List[Any], ndarray]", expected "Union[List[Any], + # ExtensionArray]") + + # pandas\core\common.py:503: error: Incompatible return value type (got + # "Union[Any, List[Any], ndarray]", expected "Union[List[Any], Index]") + + # pandas\core\common.py:503: error: Incompatible return value type (got + # "Union[Any, List[Any], ndarray]", expected "Union[List[Any], + # Series]") + return values # type: ignore[return-value] elif isinstance(values, abc.Iterable) and not isinstance(values, str): return list(values) diff --git a/pandas/core/construction.py b/pandas/core/construction.py index 4751f6076f869..af5b329ff0ccb 100644 --- a/pandas/core/construction.py +++ b/pandas/core/construction.py @@ -287,7 +287,12 @@ def array( ): dtype = data.dtype - data = extract_array(data, extract_numpy=True) + # error: Value of type variable "AnyArrayLike" of "extract_array" cannot be + # "Union[Sequence[object], ExtensionArray]" + + # error: Value of type variable "AnyArrayLike" of "extract_array" cannot be + # "Union[Sequence[object], Index]" + data = extract_array(data, extract_numpy=True) # type: ignore[type-var] # this returns None for not-found dtypes. 
if isinstance(dtype, str): diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index 70e218d8431ca..5285dc11cfdb0 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -1500,7 +1500,11 @@ def find_common_type(types: List[DtypeObj]) -> DtypeObj: if is_integer_dtype(t) or is_float_dtype(t) or is_complex_dtype(t): return np.dtype("object") - return np.find_common_type(types, []) + # error: Argument 1 to "find_common_type" has incompatible type + # "List[Union[dtype, ExtensionDtype]]"; expected "Sequence[Union[dtype, + # None, type, _SupportsDtype, str, Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, Any]]]" + return np.find_common_type(types, []) # type: ignore[arg-type] def cast_scalar_to_array(shape, value, dtype: Optional[DtypeObj] = None) -> np.ndarray: @@ -1524,7 +1528,11 @@ def cast_scalar_to_array(shape, value, dtype: Optional[DtypeObj] = None) -> np.n else: fill_value = value - values = np.empty(shape, dtype=dtype) + # error: Argument "dtype" to "empty" has incompatible type "Union[dtype, + # ExtensionDtype]"; expected "Union[dtype, None, type, _SupportsDtype, str, + # Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], List[Any], + # _DtypeDict, Tuple[Any, Any]]" + values = np.empty(shape, dtype=dtype) # type: ignore[arg-type] values.fill(fill_value) return values @@ -1549,7 +1557,9 @@ def construct_1d_arraylike_from_scalar( """ if is_extension_array_dtype(dtype): - cls = dtype.construct_array_type() + # error: Item "dtype" of "Union[dtype, ExtensionDtype]" has no + # attribute "construct_array_type" + cls = dtype.construct_array_type() # type: ignore[union-attr] subarr = cls._from_sequence([value] * length, dtype=dtype) else: @@ -1564,7 +1574,11 @@ def construct_1d_arraylike_from_scalar( if not isna(value): value = ensure_str(value) - subarr = np.empty(length, dtype=dtype) + # error: Argument "dtype" to "empty" has incompatible type + # "Union[dtype, ExtensionDtype]"; expected "Union[dtype, None, type, + # _SupportsDtype, str, Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, Any]]" + subarr = np.empty(length, dtype=dtype) # type: ignore[arg-type] subarr.fill(value) return subarr @@ -1625,7 +1639,11 @@ def construct_1d_ndarray_preserving_na( if dtype is not None and dtype.kind == "U": subarr = lib.ensure_string_array(values, convert_na_value=False, copy=copy) else: - subarr = np.array(values, dtype=dtype, copy=copy) + # error: Argument "dtype" to "array" has incompatible type + # "Union[dtype, ExtensionDtype, None]"; expected "Union[dtype, None, + # type, _SupportsDtype, str, Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, Any]]" + subarr = np.array(values, dtype=dtype, copy=copy) # type: ignore[arg-type] return subarr diff --git a/pandas/core/dtypes/missing.py b/pandas/core/dtypes/missing.py index 78902b1ca7ba6..b4782227deb82 100644 --- a/pandas/core/dtypes/missing.py +++ b/pandas/core/dtypes/missing.py @@ -637,7 +637,11 @@ def isna_all(arr: ArrayLike) -> bool: checker = lambda x: np.asarray(x.view("i8")) == iNaT # type: ignore[assignment] else: - checker = lambda x: _isna_ndarraylike(x, inf_as_na=INF_AS_NA) + # error: Incompatible types in assignment (expression has type + # "Callable[[Any], Any]", variable has type "ufunc") + checker = lambda x: _isna_ndarraylike( # type: ignore[assignment] + x, inf_as_na=INF_AS_NA + ) for i in range(0, total_len, chunk_len): if not checker(arr[i : i + chunk_len]).all(): 
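
A note on the pattern used throughout this commit: each suppression quotes the full mypy message in a comment and then silences only that error code with "# type: ignore[<code>]" rather than a bare "# type: ignore", so any new, unrelated error on the same line still surfaces. A minimal sketch of the mechanism, not taken from the pandas code above (the names maybe_n and n are illustrative); run it with mypy --show-error-codes:

    from typing import Optional

    maybe_n: Optional[int] = None

    # error: Incompatible types in assignment (expression has type
    # "Optional[int]", variable has type "int")  [assignment]
    n: int = maybe_n  # type: ignore[assignment]

Keeping the quoted message next to each ignore presumably also makes it easy to find and drop a suppression once numpy's own annotations catch up.
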
diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 64470da2fb910..daf91ff563155 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -505,7 +505,9 @@ def nansum( if is_float_dtype(dtype): dtype_sum = dtype elif is_timedelta64_dtype(dtype): - dtype_sum = np.float64 + # error: Incompatible types in assignment (expression has type + # "Type[float64]", variable has type "dtype") + dtype_sum = np.float64 # type: ignore[assignment] the_sum = values.sum(axis, dtype=dtype_sum) the_sum = _maybe_null_out(the_sum, axis, mask, values.shape, min_count=min_count) @@ -555,10 +557,14 @@ def nanmean( or is_datetime64_any_dtype(dtype) or is_timedelta64_dtype(dtype) ): - dtype_sum = np.float64 + # error: Incompatible types in assignment (expression has type + # "Type[float64]", variable has type "dtype") + dtype_sum = np.float64 # type: ignore[assignment] elif is_float_dtype(dtype): dtype_sum = dtype - dtype_count = dtype + # error: Incompatible types in assignment (expression has type "dtype", + # variable has type "Type[float64]") + dtype_count = dtype # type: ignore[assignment] count = _get_counts(values.shape, mask, axis, dtype=dtype_count) the_sum = _ensure_numeric(values.sum(axis, dtype=dtype_sum)) @@ -688,11 +694,16 @@ def _get_counts_nanvar( count = np.nan d = np.nan else: - mask2: np.ndarray = count <= ddof + # error: Incompatible types in assignment (expression has type + # "Union[bool, Any]", variable has type "ndarray") + mask2: np.ndarray = count <= ddof # type: ignore[assignment] if mask2.any(): np.putmask(d, mask2, np.nan) np.putmask(count, mask2, np.nan) - return count, d + # error: Incompatible return value type (got "Tuple[Union[int, float, + # ndarray], Any]", expected "Tuple[Union[int, ndarray], Union[int, + # ndarray]]") + return count, d # type: ignore[return-value] @disallow("M8") @@ -844,7 +855,11 @@ def nansem( if not is_float_dtype(values.dtype): values = values.astype("f8") - count, _ = _get_counts_nanvar(values.shape, mask, axis, ddof, values.dtype) + # error: Argument 1 to "_get_counts_nanvar" has incompatible type + # "Tuple[int, ...]"; expected "Tuple[int]" + count, _ = _get_counts_nanvar( # type: ignore[arg-type] + values.shape, mask, axis, ddof, values.dtype + ) var = nanvar(values, axis, skipna, ddof=ddof) return np.sqrt(var) / np.sqrt(count) diff --git a/pandas/core/ops/array_ops.py b/pandas/core/ops/array_ops.py index fd5f126051c53..5623993704793 100644 --- a/pandas/core/ops/array_ops.py +++ b/pandas/core/ops/array_ops.py @@ -74,7 +74,11 @@ def masked_arith_op(x: np.ndarray, y, op): assert isinstance(x, np.ndarray), type(x) if isinstance(y, np.ndarray): dtype = find_common_type([x.dtype, y.dtype]) - result = np.empty(x.size, dtype=dtype) + # error: Argument "dtype" to "empty" has incompatible type + # "Union[dtype, ExtensionDtype]"; expected "Union[dtype, None, type, + # _SupportsDtype, str, Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, Any]]" + result = np.empty(x.size, dtype=dtype) # type: ignore[arg-type] if len(x) != len(y): raise ValueError(x.shape, y.shape) @@ -353,7 +357,9 @@ def fill_bool(x, left=None): # integer dtypes. 
Otherwise these are boolean ops filler = fill_int if is_self_int_dtype and is_other_int_dtype else fill_bool - res_values = na_logical_op(lvalues, rvalues, op) + # error: Argument 1 to "na_logical_op" has incompatible type + # "ExtensionArray"; expected "ndarray" + res_values = na_logical_op(lvalues, rvalues, op) # type: ignore[arg-type] # error: Cannot call function of unknown type res_values = filler(res_values) # type: ignore[operator] @@ -418,7 +424,9 @@ def maybe_upcast_datetimelike_array(obj: ArrayLike) -> ArrayLike: if obj.dtype.kind == "m": from pandas.core.arrays import TimedeltaArray - return TimedeltaArray._from_sequence(obj) + # error: Incompatible return value type (got "TimedeltaArray", + # expected "ndarray") + return TimedeltaArray._from_sequence(obj) # type: ignore[return-value] if obj.dtype.kind == "M": from pandas.core.arrays import DatetimeArray diff --git a/pandas/core/strings/object_array.py b/pandas/core/strings/object_array.py index a29d84edd3a77..2eb1a8917ed96 100644 --- a/pandas/core/strings/object_array.py +++ b/pandas/core/strings/object_array.py @@ -51,7 +51,9 @@ def _str_map(self, f, na_value=None, dtype=None): na_value = self._str_na_value if not len(arr): - return np.ndarray(0, dtype=dtype) + # error: Argument 1 to "ndarray" has incompatible type "int"; + # expected "Sequence[int]" + return np.ndarray(0, dtype=dtype) # type: ignore[arg-type] if not isinstance(arr, np.ndarray): arr = np.asarray(arr, dtype=object) From a0bae1bf265cd4d00f9c119373c6aa39722ea24d Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 13:16:50 +0100 Subject: [PATCH 09/86] pep fix --- pandas/compat/numpy/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pandas/compat/numpy/__init__.py b/pandas/compat/numpy/__init__.py index 733cf3075d4db..46075a0672c0f 100644 --- a/pandas/compat/numpy/__init__.py +++ b/pandas/compat/numpy/__init__.py @@ -44,7 +44,7 @@ def np_datetime64_compat(s, *args, **kwargs): """ s = tz_replacer(s) # error: No overload variant of "datetime64" matches argument types "Any", - # "Tuple[Any, ...]", "Dict[str, Any]" + # "Tuple[Any, ...]", "Dict[str, Any]" return np.datetime64(s, *args, **kwargs) # type: ignore[call-overload] From b74ea132cf431337fcc3e91e5551e1ae765d3799 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 13:22:04 +0100 Subject: [PATCH 10/86] new black --- pandas/core/algorithms.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index 2b257407bea89..54a2c48ac88a1 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -1052,11 +1052,10 @@ def checked_add_with_arr(arr, b, arr_mask=None, b_mask=None): to_raise = ((np.iinfo(np.int64).max - b2 < arr) & not_nan).any() else: to_raise = ( - ((np.iinfo(np.int64).max - b2[mask1] < arr[mask1]) & not_nan[mask1]).any() - or ( - (np.iinfo(np.int64).min - b2[mask2] > arr[mask2]) & not_nan[mask2] - ).any() - ) + (np.iinfo(np.int64).max - b2[mask1] < arr[mask1]) & not_nan[mask1] + ).any() or ( + (np.iinfo(np.int64).min - b2[mask2] > arr[mask2]) & not_nan[mask2] + ).any() if to_raise: raise OverflowError("Overflow in int64 addition") From 911390ebe74ec902be542e7112008fdd3812d9df Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 13:59:37 +0100 Subject: [PATCH 11/86] wip --- pandas/core/algorithms.py | 34 ++++++++++++++++++++++++++---- pandas/core/arrays/_mixins.py | 6 ++++-- pandas/core/arrays/base.py | 6 ++++-- pandas/core/arrays/datetimelike.py | 5 
++++- pandas/core/arrays/numpy_.py | 10 +++++++-- pandas/core/construction.py | 9 +++++++- pandas/core/indexes/base.py | 4 +++- pandas/core/nanops.py | 19 ++++++++++++++--- pandas/core/sorting.py | 3 ++- pandas/core/strings/accessor.py | 24 +++++++++------------ pandas/tseries/frequencies.py | 3 ++- 11 files changed, 91 insertions(+), 32 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index 54a2c48ac88a1..fcc5bd9873bd5 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -151,14 +151,20 @@ def _ensure_data( # TODO(EA2D): special case not needed with 2D EAs asi8 = values.view("i8") dtype = values.dtype - return asi8, dtype + # error: Incompatible return value type (got "Tuple[Any, + # Union[dtype, ExtensionDtype, None]]", expected + # "Tuple[ndarray, Union[dtype, ExtensionDtype]]") + return asi8, dtype # type: ignore[return-value] from pandas import DatetimeIndex values = DatetimeIndex(values) dtype = values.dtype - return values.asi8, dtype + # error: Incompatible return value type (got "Tuple[Any, Union[dtype, + # ExtensionDtype, None]]", expected "Tuple[ndarray, Union[dtype, + # ExtensionDtype]]") + return values.asi8, dtype # type: ignore[return-value] elif is_categorical_dtype(vals_dtype) and ( is_categorical_dtype(dtype) or dtype is None @@ -454,13 +460,33 @@ def isin(comps: AnyArrayLike, values: AnyArrayLike) -> np.ndarray: ) # TODO: could use ensure_arraylike here - comps = extract_array(comps, extract_numpy=True) + # pandas\core\algorithms.py:457: error: Incompatible types in assignment + # (expression has type "ExtensionArray", variable has type "Index") + # [assignment] + + # pandas\core\algorithms.py:457: error: Incompatible types in assignment + # (expression has type "ExtensionArray", variable has type "Series") + # [assignment] + + # pandas\core\algorithms.py:457: error: Incompatible types in assignment + # (expression has type "ExtensionArray", variable has type "ndarray") + # [assignment] + comps = extract_array(comps, extract_numpy=True) # type: ignore[assignment] if is_categorical_dtype(comps): # TODO(extension) # handle categoricals return cast("Categorical", comps).isin(values) - comps, dtype = _ensure_data(comps) + # pandas\core\algorithms.py:463: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "ExtensionArray") + # [assignment] + + # pandas\core\algorithms.py:463: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "Index") [assignment] + + # pandas\core\algorithms.py:463: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "Series") [assignment] + comps, dtype = _ensure_data(comps) # type: ignore[assignment] values, _ = _ensure_data(values, dtype=dtype) # faster for larger cases to use np.in1d diff --git a/pandas/core/arrays/_mixins.py b/pandas/core/arrays/_mixins.py index 4d13a18c8ef0b..fc06a7d6f1eea 100644 --- a/pandas/core/arrays/_mixins.py +++ b/pandas/core/arrays/_mixins.py @@ -94,7 +94,8 @@ def ndim(self) -> int: @cache_readonly def size(self) -> int: - return np.prod(self.shape) + # error: Incompatible return value type (got "number", expected "int") + return np.prod(self.shape) # type: ignore[return-value] @cache_readonly def nbytes(self) -> int: @@ -216,7 +217,8 @@ def fillna(self: _T, value=None, method=None, limit=None) -> _T: ) value = value[mask] - if mask.any(): + # error: "ExtensionArray" has no attribute "any" + if mask.any(): # type: ignore[attr-defined] if method is not 
None: func = missing.get_fill_func(method) new_values = func(self._ndarray.copy(), limit=limit, mask=mask) diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py index c2fc72ff753a8..78929a1e7f5c2 100644 --- a/pandas/core/arrays/base.py +++ b/pandas/core/arrays/base.py @@ -338,7 +338,8 @@ def __iter__(self): for i in range(len(self)): yield self[i] - def __eq__(self, other: Any) -> ArrayLike: + # error: Signature of "__eq__" incompatible with supertype "object" + def __eq__(self, other: Any) -> ArrayLike: # type: ignore[override] """ Return for `self == other` (element-wise equality). """ @@ -350,7 +351,8 @@ def __eq__(self, other: Any) -> ArrayLike: # underlying arrays) raise AbstractMethodError(self) - def __ne__(self, other: Any) -> ArrayLike: + # error: Signature of "__ne__" incompatible with supertype "object" + def __ne__(self, other: Any) -> ArrayLike: # type: ignore[override] """ Return for `self != other` (element-wise in-equality). """ diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py index 83a9c0ba61c2d..33a85239ed3be 100644 --- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -564,7 +564,10 @@ def _get_getitem_freq(self, key): freq = self.freq return freq - def __setitem__( + # error: Argument 1 of "__setitem__" is incompatible with supertype + # "ExtensionArray"; supertype defines the argument type as "Union[int, + # ndarray]" + def __setitem__( # type: ignore[override] self, key: Union[int, Sequence[int], Sequence[bool], slice], value: Union[NaTType, Any, Sequence[Any]], diff --git a/pandas/core/arrays/numpy_.py b/pandas/core/arrays/numpy_.py index 05139783456b9..85c640789a66e 100644 --- a/pandas/core/arrays/numpy_.py +++ b/pandas/core/arrays/numpy_.py @@ -41,7 +41,11 @@ class PandasDtype(ExtensionDtype): _metadata = ("_dtype",) def __init__(self, dtype: object): - self._dtype = np.dtype(dtype) + # error: Argument 1 to "dtype" has incompatible type "object"; expected + # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, + # Tuple[Any, Any]]" + self._dtype = np.dtype(dtype) # type: ignore[arg-type] def __repr__(self) -> str: return f"PandasDtype({repr(self.name)})" @@ -247,7 +251,9 @@ def __array_ufunc__(self, ufunc, method: str, *inputs, **kwargs): # ------------------------------------------------------------------------ # Pandas ExtensionArray Interface - def isna(self) -> np.ndarray: + # error: Return type "ndarray" of "isna" incompatible with return type + # "ArrayLike" in supertype "ExtensionArray" + def isna(self) -> np.ndarray: # type: ignore[override] return isna(self._ndarray) def _validate_fill_value(self, fill_value): diff --git a/pandas/core/construction.py b/pandas/core/construction.py index af5b329ff0ccb..705bdd81e8a73 100644 --- a/pandas/core/construction.py +++ b/pandas/core/construction.py @@ -528,7 +528,14 @@ def sanitize_array( # GH#19853: If data is a scalar, subarr has already the result if not lib.is_scalar(data): if not np.all(isna(data)): - data = np.array(data, dtype=dtype, copy=False) + # error: Argument "dtype" to "array" has incompatible type + # "Union[dtype, ExtensionDtype, None]"; expected + # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, + # int], Tuple[Any, Union[int, Sequence[int]]], List[Any], + # _DtypeDict, Tuple[Any, Any]]" + data = np.array( # type:ignore[arg-type] + data, dtype=dtype, copy=False + ) subarr = np.array(data, dtype=object, copy=copy) 
is_object_or_str_dtype = is_object_dtype(dtype) or is_string_dtype(dtype) diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index ff3d8bf05f9a5..b6c0240abb15f 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -3925,7 +3925,9 @@ def values(self) -> np.ndarray: Index.array : Reference to the underlying data. Index.to_numpy : A NumPy array representing the underlying data. """ - return self._data.view(np.ndarray) + # error: Incompatible return value type (got "Union[ExtensionArray, + # ndarray]", expected "ndarray") + return self._data.view(np.ndarray) # type: ignore[return-value] @cache_readonly @doc(IndexOpsMixin.array) diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 826748d638336..75b30589ad517 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -281,7 +281,9 @@ def _get_values( # with scalar fill_value. This guarantee is important for the # maybe_upcast_putmask call below assert is_scalar(fill_value) - values = extract_array(values, extract_numpy=True) + # error: Incompatible types in assignment (expression has type + # "ExtensionArray", variable has type "ndarray") + values = extract_array(values, extract_numpy=True) # type: ignore[assignment] mask = _maybe_get_mask(values, skipna, mask) @@ -339,7 +341,13 @@ def _wrap_results(result, dtype: DtypeObj, fill_value=None): result = Timestamp(result, tz=tz) else: # If we have float dtype, taking a view will give the wrong result - result = result.astype(dtype) + + # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has + # incompatible type "Union[dtype, ExtensionDtype]"; expected + # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, + # Tuple[Any, Any]]" + result = result.astype(dtype) # type: ignore[arg-type] elif is_timedelta64_dtype(dtype): if not isinstance(result, np.ndarray): if result == fill_value: @@ -351,7 +359,12 @@ def _wrap_results(result, dtype: DtypeObj, fill_value=None): result = Timedelta(result, unit="ns") else: - result = result.astype("m8[ns]").view(dtype) + # error: Argument 1 to "view" of "_ArrayOrScalarCommon" has + # incompatible type "Union[dtype, ExtensionDtype]"; expected + # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, + # Tuple[Any, Any]]" + result = result.astype("m8[ns]").view(dtype) # type: ignore[arg-type] return result diff --git a/pandas/core/sorting.py b/pandas/core/sorting.py index e02b565ed5d7b..e2e90b4ed3648 100644 --- a/pandas/core/sorting.py +++ b/pandas/core/sorting.py @@ -34,6 +34,7 @@ _INT64_MAX = np.iinfo(np.int64).max +# error: Function "numpy.array" is not valid as a type def get_indexer_indexer( target: "Index", level: Union[str, int, List[str], List[int]], @@ -42,7 +43,7 @@ def get_indexer_indexer( na_position: str, sort_remaining: bool, key: IndexKeyFunc, -) -> Optional[np.array]: +) -> Optional[np.array]: # type: ignore[valid-type] """ Helper method that return the indexer according to input parameters for the sort_index method of DataFrame and Series. 
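
One detail worth spelling out from the sorting.py hunk above: mypy rejects Optional[np.array] because np.array is the array-construction function, not a type; np.ndarray is the class an annotation should name. The commit keeps the existing annotation and adds "# type: ignore[valid-type]", presumably to avoid mixing annotation changes into a series that only adds suppressions. A hedged sketch of the spelling that would not need the ignore (sort_indexer is an illustrative name, not pandas code):

    import numpy as np
    from typing import Optional

    def sort_indexer(values: np.ndarray) -> Optional[np.ndarray]:
        # np.ndarray is the type; np.array is the factory that returns one.
        if values.size == 0:
            return None
        return np.argsort(values)

For example, sort_indexer(np.array([3, 1, 2])) returns array([1, 2, 0]), and the function type-checks without any ignore comment.
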
diff --git a/pandas/core/strings/accessor.py b/pandas/core/strings/accessor.py index cae8cc1baf1df..0c9426eb7442f 100644 --- a/pandas/core/strings/accessor.py +++ b/pandas/core/strings/accessor.py @@ -237,12 +237,7 @@ def __iter__(self): g = self.get(i) def _wrap_result( - self, - result, - name=None, - expand=None, - fill_value=np.nan, - returns_string=True, + self, result, name=None, expand=None, fill_value=np.nan, returns_string=True, ): from pandas import Index, MultiIndex @@ -1895,12 +1890,7 @@ def get_dummies(self, sep="|"): # we need to cast to Series of strings as only that has all # methods available for making the dummies... result, name = self._array._str_get_dummies(sep) - return self._wrap_result( - result, - name=name, - expand=True, - returns_string=False, - ) + return self._wrap_result(result, name=name, expand=True, returns_string=False,) @forbid_nonstring_types(["bytes"]) def translate(self, table): @@ -2989,10 +2979,16 @@ def _str_extract_noexpand(arr, pat, flags=0): names = dict(zip(regex.groupindex.values(), regex.groupindex.keys())) columns = [names.get(1 + i, i) for i in range(regex.groups)] if arr.size == 0: - result = DataFrame(columns=columns, dtype=object) + # error: Incompatible types in assignment (expression has type + # "DataFrame", variable has type "ndarray") + result = DataFrame( # type: ignore[assignment] + columns=columns, dtype=object + ) else: dtype = _result_dtype(arr) - result = DataFrame( + # error: Incompatible types in assignment (expression has type + # "DataFrame", variable has type "ndarray") + result = DataFrame( # type:ignore[assignment] [groups_or_na(val) for val in arr], columns=columns, index=arr.index, diff --git a/pandas/tseries/frequencies.py b/pandas/tseries/frequencies.py index 8ef6dac2862db..2de09ac599520 100644 --- a/pandas/tseries/frequencies.py +++ b/pandas/tseries/frequencies.py @@ -374,7 +374,8 @@ def _is_business_daily(self) -> bool: shifts = np.diff(self.index.asi8) shifts = np.floor_divide(shifts, _ONE_DAY) weekdays = np.mod(first_weekday + np.cumsum(shifts), 7) - return np.all( + # error: Incompatible return value type (got "bool_", expected "bool") + return np.all( # type: ignore[return-value] ((weekdays == 0) & (shifts == 3)) | ((weekdays > 0) & (weekdays <= 4) & (shifts == 1)) ) From 02588b3a533c4c3f30c0296f4487dcdebd1540cc Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 14:01:17 +0100 Subject: [PATCH 12/86] black changes --- pandas/core/strings/accessor.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/pandas/core/strings/accessor.py b/pandas/core/strings/accessor.py index 0c9426eb7442f..cdb889b685c0d 100644 --- a/pandas/core/strings/accessor.py +++ b/pandas/core/strings/accessor.py @@ -237,7 +237,12 @@ def __iter__(self): g = self.get(i) def _wrap_result( - self, result, name=None, expand=None, fill_value=np.nan, returns_string=True, + self, + result, + name=None, + expand=None, + fill_value=np.nan, + returns_string=True, ): from pandas import Index, MultiIndex @@ -1890,7 +1895,12 @@ def get_dummies(self, sep="|"): # we need to cast to Series of strings as only that has all # methods available for making the dummies... 
result, name = self._array._str_get_dummies(sep) - return self._wrap_result(result, name=name, expand=True, returns_string=False,) + return self._wrap_result( + result, + name=name, + expand=True, + returns_string=False, + ) @forbid_nonstring_types(["bytes"]) def translate(self, table): From 04ab966c9eedf8fed88805453e36afdec5f10294 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 14:54:13 +0100 Subject: [PATCH 13/86] wip --- pandas/core/arrays/boolean.py | 25 ++++++++++++++++---- pandas/core/arrays/categorical.py | 20 ++++++++++++---- pandas/core/arrays/sparse/accessor.py | 4 +++- pandas/core/indexes/numeric.py | 14 ++++++++--- pandas/core/internals/blocks.py | 15 +++++++++--- pandas/core/internals/managers.py | 25 ++++++++++++++++---- pandas/core/tools/datetimes.py | 34 ++++++++++++++++++++------- pandas/core/window/rolling.py | 16 +++++++++---- 8 files changed, 119 insertions(+), 34 deletions(-) diff --git a/pandas/core/arrays/boolean.py b/pandas/core/arrays/boolean.py index dd750bce7842e..72dc4ae07def9 100644 --- a/pandas/core/arrays/boolean.py +++ b/pandas/core/arrays/boolean.py @@ -377,9 +377,15 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: if isinstance(dtype, BooleanDtype): values, mask = coerce_to_array(self, copy=copy) if not copy: - return self + # error: Incompatible return value type (got "BooleanArray", + # expected "ndarray") + return self # type: ignore[return-value] else: - return BooleanArray(values, mask, copy=False) + # error: Incompatible return value type (got "BooleanArray", + # expected "ndarray") + return BooleanArray( # type: ignore[return-value] + values, mask, copy=False + ) elif isinstance(dtype, StringDtype): return dtype.construct_array_type()._from_sequence(self, copy=False) @@ -388,11 +394,15 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: if self._hasna: raise ValueError("cannot convert float NaN to bool") else: - return self._data.astype(dtype, copy=copy) + # error: Incompatible return value type (got "ndarray", + # expected "ExtensionArray") + return self._data.astype(dtype, copy=copy) # type: ignore[return-value] if is_extension_array_dtype(dtype) and is_integer_dtype(dtype): from pandas.core.arrays import IntegerArray - return IntegerArray( + # error: Incompatible return value type (got "IntegerArray", + # expected "ndarray") + return IntegerArray( # type: ignore[return-value] self._data.astype(dtype.numpy_dtype), self._mask.copy(), copy=False ) # for integer, error if there are missing values @@ -405,7 +415,12 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: if is_float_dtype(dtype): na_value = np.nan # coerce - return self.to_numpy(dtype=dtype, na_value=na_value, copy=False) + + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") + return self.to_numpy( # type: ignore[return-value] + dtype=dtype, na_value=na_value, copy=False + ) def _values_for_argsort(self) -> np.ndarray: """ diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py index 1a8861af10ed1..773708d15c821 100644 --- a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -1189,7 +1189,9 @@ def _validate_searchsorted_value(self, value): codes = self._unbox_scalar(value) else: locs = [self.categories.get_loc(x) for x in value] - codes = np.array(locs, dtype=self.codes.dtype) + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "int") + codes = np.array(locs, dtype=self.codes.dtype) # type: ignore[assignment] 
return codes def _validate_fill_value(self, fill_value): @@ -1661,7 +1663,11 @@ def fillna(self, value=None, method=None, limit=None): # We get ndarray or Categorical if called via Series.fillna, # where it will unwrap another aligned Series before getting here - not_categories = ~algorithms.isin(value, self.categories) + # error: Value of type variable "AnyArrayLike" of "isin" cannot + # be "Union[ndarray, Categorical]" + not_categories = ~algorithms.isin( # type: ignore[type-var] + value, self.categories + ) if not isna(value[not_categories]).all(): # All entries in `value` must either be a category or NA raise ValueError("fill value must be in categories") @@ -1679,7 +1685,9 @@ def fillna(self, value=None, method=None, limit=None): # NDArrayBackedExtensionArray compat @property - def _ndarray(self) -> np.ndarray: + # error: Signature of "_ndarray" incompatible with supertype + # "NDArrayBackedExtensionArray" + def _ndarray(self) -> np.ndarray: # type: ignore[override] return self._codes def _from_backing_data(self, arr: np.ndarray) -> "Categorical": @@ -2323,7 +2331,11 @@ def _str_get_dummies(self, sep="|"): # sep may not be in categories. Just bail on this. from pandas.core.arrays import PandasArray - return PandasArray(self.astype(str))._str_get_dummies(sep) + # error: Argument 1 to "PandasArray" has incompatible type + # "ExtensionArray"; expected "Union[ndarray, PandasArray]" + return PandasArray(self.astype(str))._str_get_dummies( # type: ignore[arg-type] + sep + ) # The Series.cat accessor diff --git a/pandas/core/arrays/sparse/accessor.py b/pandas/core/arrays/sparse/accessor.py index ec4b0fd89860c..30fe8bda17f0d 100644 --- a/pandas/core/arrays/sparse/accessor.py +++ b/pandas/core/arrays/sparse/accessor.py @@ -351,7 +351,9 @@ def density(self) -> float: """ Ratio of non-sparse points to total (dense) data points. 
""" - return np.mean([column.array.density for _, column in self._parent.items()]) + # error: Incompatible return value type (got "number", expected "float") + tmp = np.mean([column.array.density for _, column in self._parent.items()]) + return tmp # type: ignore[return-value] @staticmethod def _prep_index(data, index, columns): diff --git a/pandas/core/indexes/numeric.py b/pandas/core/indexes/numeric.py index 34bbaca06cc08..5e593fdd26793 100644 --- a/pandas/core/indexes/numeric.py +++ b/pandas/core/indexes/numeric.py @@ -250,7 +250,10 @@ def inferred_type(self) -> str: @property def asi8(self) -> np.ndarray: # do not cache or you'll create a memory leak - return self._values.view(self._default_dtype) + + # error: Incompatible return value type (got "Union[ExtensionArray, + # ndarray]", expected "ndarray") + return self._values.view(self._default_dtype) # type: ignore[return-value] class Int64Index(IntegerIndex): @@ -335,7 +338,9 @@ class Float64Index(NumericIndex): _typ = "float64index" _engine_type = libindex.Float64Engine - _default_dtype = np.float64 + # error: Incompatible types in assignment (expression has type + # "Type[float64]", base class "NumericIndex" defined the type as "dtype") + _default_dtype = np.float64 # type: ignore[assignment] @property def inferred_type(self) -> str: @@ -412,7 +417,10 @@ def __contains__(self, other: Any) -> bool: if super().__contains__(other): return True - return is_float(other) and np.isnan(other) and self.hasnans + # error: Incompatible return value type (got "Union[Any, ndarray, + # generic]", expected "bool") + tmp = is_float(other) and np.isnan(other) and self.hasnans + return tmp # type: ignore[return-value] @cache_readonly def is_unique(self) -> bool: diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 09f276be7d64a..5f1ddd907973f 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -816,7 +816,10 @@ def comp(s: Scalar, mask: np.ndarray, regex: bool = False) -> np.ndarray: return ~mask s = com.maybe_box_datetimelike(s) - return compare_or_regex_search(self.values, s, regex, mask) + # error: Incompatible return value type (got "Union[ndarray, + # bool]", expected "ndarray") + tmp = compare_or_regex_search(self.values, s, regex, mask) + return tmp # type: ignore[return-value] # Calculate the mask once, prior to the call of comp # in order to avoid repeating the same computations @@ -1283,7 +1286,10 @@ def func(yvalues: np.ndarray) -> np.ndarray: # process a 1-d slice, returning it # should the axis argument be handled below in apply_along_axis? # i.e. not an arg to missing.interpolate_1d - return missing.interpolate_1d( + + # error: Argument "xvalues" to "interpolate_1d" has incompatible + # type "Index"; expected "ndarray" + return missing.interpolate_1d( # type: ignore[arg-type] xvalues=index, yvalues=yvalues, method=method, @@ -2935,7 +2941,10 @@ def _extract_bool_array(mask: ArrayLike) -> np.ndarray: # We could have BooleanArray, Sparse[bool], ... 
# Except for BooleanArray, this is equivalent to just # np.asarray(mask, dtype=bool) - mask = mask.to_numpy(dtype=bool, na_value=False) + + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "ExtensionArray") + mask = mask.to_numpy(dtype=bool, na_value=False) # type: ignore[assignment] assert isinstance(mask, np.ndarray), type(mask) assert mask.dtype == bool, mask.dtype diff --git a/pandas/core/internals/managers.py b/pandas/core/internals/managers.py index f2480adce89b4..ddaefbf1be6dd 100644 --- a/pandas/core/internals/managers.py +++ b/pandas/core/internals/managers.py @@ -143,8 +143,12 @@ def __init__( # Populate known_consolidate, blknos, and blklocs lazily self._known_consolidated = False - self._blknos = None - self._blklocs = None + # error: Incompatible types in assignment (expression has type "None", + # variable has type "ndarray") + self._blknos = None # type: ignore[assignment] + # error: Incompatible types in assignment (expression has type "None", + # variable has type "ndarray") + self._blklocs = None # type: ignore[assignment] @classmethod def from_blocks(cls, blocks: List[Block], axes: List[Index]): @@ -906,7 +910,12 @@ def fast_xs(self, loc: int) -> ArrayLike: # we'll eventually construct an ExtensionArray. result = np.empty(n, dtype=object) else: - result = np.empty(n, dtype=dtype) + # error: Argument "dtype" to "empty" has incompatible type + # "Union[dtype, ExtensionDtype, None]"; expected "Union[dtype, + # None, type, _SupportsDtype, str, Tuple[Any, int], Tuple[Any, + # Union[int, Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, + # Any]]" + result = np.empty(n, dtype=dtype) # type: ignore[arg-type] for blk in self.blocks: # Such assignment may incorrectly coerce NaT to None @@ -917,7 +926,9 @@ def fast_xs(self, loc: int) -> ArrayLike: if isinstance(dtype, ExtensionDtype): result = dtype.construct_array_type()._from_sequence(result, dtype=dtype) - return result + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") + return result # type: ignore[return-value] def consolidate(self) -> "BlockManager": """ @@ -1041,7 +1052,11 @@ def value_getitem(placement): # We have 6 tests where loc is _not_ an int. 
# In this case, get_blkno_placements will yield only one tuple, # containing (self._blknos[loc], BlockPlacement(slice(0, 1, 1))) - loc = [loc] + + # error: Incompatible types in assignment (expression has type + # "List[Union[int, slice, ndarray]]", variable has type "Union[int, + # slice, ndarray]") + loc = [loc] # type: ignore[assignment] # Accessing public blknos ensures the public versions are initialized blknos = self.blknos[loc] diff --git a/pandas/core/tools/datetimes.py b/pandas/core/tools/datetimes.py index 7b384c9bbb47d..bbd512aab4dc0 100644 --- a/pandas/core/tools/datetimes.py +++ b/pandas/core/tools/datetimes.py @@ -224,7 +224,9 @@ def _convert_and_box_cache( from pandas import Series result = Series(arg).map(cache_array) - return _box_as_indexlike(result, utc=None, name=name) + # error: Value of type variable "ArrayLike" of "_box_as_indexlike" cannot + # be "Series" + return _box_as_indexlike(result, utc=None, name=name) # type: ignore[type-var] def _return_parsed_timezone_results(result, timezones, tz, name): @@ -340,22 +342,32 @@ def _convert_listlike_datetimes( if errors == "ignore": - result = Index(result, name=name) + # error: Incompatible types in assignment (expression has type + # "Index", variable has type "ExtensionArray") + result = Index(result, name=name) # type: ignore[assignment] else: - result = DatetimeIndex(result, name=name) + # error: Incompatible types in assignment (expression has type + # "DatetimeIndex", variable has type "ExtensionArray") + result = DatetimeIndex(result, name=name) # type: ignore[assignment] # GH 23758: We may still need to localize the result with tz # GH 25546: Apply tz_parsed first (from arg), then tz (from caller) # result will be naive but in UTC try: - result = result.tz_localize("UTC").tz_convert(tz_parsed) + # error: "ExtensionArray" has no attribute "tz_localize" + result = result.tz_localize("UTC").tz_convert( # type: ignore[attr-defined] + tz_parsed + ) except AttributeError: # Regular Index from 'ignore' path return result if tz is not None: - if result.tz is None: - result = result.tz_localize(tz) + # error: "ExtensionArray" has no attribute "tz" + if result.tz is None: # type: ignore[attr-defined] + # error: "ExtensionArray" has no attribute "tz_localize" + result = result.tz_localize(tz) # type: ignore[attr-defined] else: - result = result.tz_convert(tz) + # error: "ExtensionArray" has no attribute "tz_convert" + result = result.tz_convert(tz) # type: ignore[attr-defined] return result elif getattr(arg, "ndim", 1) > 1: raise TypeError( @@ -372,7 +384,9 @@ def _convert_listlike_datetimes( result = np.array(["NaT"], dtype="datetime64[ns]").repeat(len(arg)) return DatetimeIndex(result, name=name) elif errors == "ignore": - result = Index(arg, name=name) + # error: Incompatible types in assignment (expression has type + # "Index", variable has type "ExtensionArray") + result = Index(arg, name=name) # type: ignore[assignment] return result raise @@ -393,7 +407,9 @@ def _convert_listlike_datetimes( format = None tz_parsed = None - result = None + # error: Incompatible types in assignment (expression has type "None", + # variable has type "ExtensionArray") + result = None # type: ignore[assignment] if format is not None: try: diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index 39f1839ba559d..cd99ae8a40fc9 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -357,15 +357,23 @@ def __iter__(self): def _prep_values(self, values: Optional[np.ndarray] = None) -> np.ndarray: 
"""Convert input to numpy arrays for Cython routines""" if values is None: - values = extract_array(self._selected_obj, extract_numpy=True) + # error: Incompatible types in assignment (expression has type + # "ExtensionArray", variable has type "Optional[ndarray]") + values = extract_array( # type: ignore[assignment] + self._selected_obj, extract_numpy=True + ) # GH #12373 : rolling functions error on float32 data # make sure the data is coerced to float64 - if is_float_dtype(values.dtype): + + # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" + if is_float_dtype(values.dtype): # type[union-attr] values = ensure_float64(values) - elif is_integer_dtype(values.dtype): + # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" + elif is_integer_dtype(values.dtype): # type: ignore[union-attr] values = ensure_float64(values) - elif needs_i8_conversion(values.dtype): + # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" + elif needs_i8_conversion(values.dtype): # type: ignore[union-attr] raise NotImplementedError( f"ops for {self._window_type} for this " f"dtype {values.dtype} are not implemented" From 0fac404a8859c8a11efe11b8ea3fc28dc30bf39f Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 15:12:37 +0100 Subject: [PATCH 14/86] ignore errors in pandas/tests/* --- setup.cfg | 117 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 117 insertions(+) diff --git a/setup.cfg b/setup.cfg index 73986f692b6cd..89f3d7e3f1860 100644 --- a/setup.cfg +++ b/setup.cfg @@ -306,3 +306,120 @@ check_untyped_defs=False [mypy-pandas.util._decorators] check_untyped_defs=False +[mypy-pandas.conftest] +ignore_errors=True + +[mypy-pandas.tests.arithmetic.conftest] +ignore_errors=True + +[mypy-pandas.tests.computation.test_eval] +ignore_errors=True + +[mypy-pandas.tests.dtypes.test_common] +ignore_errors=True + +[mypy-pandas.tests.dtypes.test_generic] +ignore_errors=True + +[mypy-pandas.tests.frame.methods.test_to_records] +ignore_errors=True + +[mypy-pandas.tests.frame.test_constructors] +ignore_errors=True + +[mypy-pandas.tests.groupby.transform.test_transform] +ignore_errors=True + +[mypy-pandas.tests.indexes.datetimes.test_datetime] +ignore_errors=True + +[mypy-pandas.tests.indexes.interval.test_interval] +ignore_errors=True + +[mypy-pandas.tests.indexes.timedeltas.test_timedelta] +ignore_errors=True + +[mypy-pandas.tests.indexing.multiindex.test_insert] +ignore_errors=True + +[mypy-pandas.tests.indexing.multiindex.test_setitem] +ignore_errors=True + +[mypy-pandas.tests.indexing.multiindex.test_sorted] +ignore_errors=True + +[mypy-pandas.tests.indexing.test_categorical] +ignore_errors=True + +[mypy-pandas.tests.indexing.test_indexing] +ignore_errors=True + +[mypy-pandas.tests.io.excel.test_writers] +ignore_errors=True + +[mypy-pandas.tests.io.test_clipboard] +ignore_errors=True + +[mypy-pandas.tests.io.test_html] +ignore_errors=True + +[mypy-pandas.tests.plotting.test_frame] +ignore_errors=True + +[mypy-pandas.tests.plotting.test_hist_method] +ignore_errors=True + +[mypy-pandas.tests.plotting.test_misc] +ignore_errors=True + +[mypy-pandas.tests.plotting.test_series] +ignore_errors=True + +[mypy-pandas.tests.reductions.test_reductions] +ignore_errors=True + +[mypy-pandas.tests.resample.test_resample_api] +ignore_errors=True + +[mypy-pandas.tests.resample.test_time_grouper] +ignore_errors=True + +[mypy-pandas.tests.reshape.merge.test_join] +ignore_errors=True + +[mypy-pandas.tests.reshape.merge.test_multi] +ignore_errors=True + 
+[mypy-pandas.tests.reshape.test_concat] +ignore_errors=True + +[mypy-pandas.tests.series.apply.test_series_apply] +ignore_errors=True + +[mypy-pandas.tests.series.indexing.test_get] +ignore_errors=True + +[mypy-pandas.tests.series.test_datetime_values] +ignore_errors=True + +[mypy-pandas.tests.series.test_duplicates] +ignore_errors=True + +[mypy-pandas.tests.test_algos] +ignore_errors=True + +[mypy-pandas.tests.test_expressions] +ignore_errors=True + +[mypy-pandas.tests.test_multilevel] +ignore_errors=True + +[mypy-pandas.tests.test_strings] +ignore_errors=True + +[mypy-pandas.tests.window.conftest] +ignore_errors=True + +[mypy-pandas.tests.window.test_dtypes] +ignore_errors=True + From 7dc3f7b22799c41e9007972c39cb0ffd95fb5983 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 16:03:04 +0100 Subject: [PATCH 15/86] wip --- pandas/core/aggregation.py | 4 +++- pandas/core/arrays/categorical.py | 12 +++++++--- pandas/core/arrays/floating.py | 19 ++++++++++++---- pandas/core/arrays/integer.py | 6 ++++- pandas/core/indexes/datetimes.py | 6 ++++- pandas/core/indexing.py | 4 +++- pandas/core/internals/concat.py | 7 +++++- pandas/core/internals/construction.py | 21 +++++++++++++----- pandas/core/window/ewm.py | 32 +++++++++++++++++++++++---- pandas/core/window/rolling.py | 15 ++++++++++--- pandas/io/formats/format.py | 26 +++++++++++++++++++--- 11 files changed, 125 insertions(+), 27 deletions(-) diff --git a/pandas/core/aggregation.py b/pandas/core/aggregation.py index f2eb282d1e498..c5b730ae376fc 100644 --- a/pandas/core/aggregation.py +++ b/pandas/core/aggregation.py @@ -172,7 +172,9 @@ def normalize_keyword_aggregation(kwargs: dict) -> Tuple[dict, List[str], List[i # get the new index of columns by comparison col_idx_order = Index(uniquified_aggspec).get_indexer(uniquified_order) - return aggspec, columns, col_idx_order + # error: Incompatible return value type (got "Tuple[defaultdict[Any, Any], + # Any, ndarray]", expected "Tuple[Dict[Any, Any], List[str], List[int]]") + return aggspec, columns, col_idx_order # type: ignore[return-value] def _make_unique_kwarg_list( diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py index 773708d15c821..46cd09fa039d8 100644 --- a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -407,13 +407,19 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: dtype = self.dtype.update_dtype(dtype) self = self.copy() if copy else self if dtype == self.dtype: - return self - return self._set_dtype(dtype) + # error: Incompatible return value type (got "Categorical", + # expected "ndarray") + return self # type: ignore[return-value] + # error: Incompatible return value type (got "Categorical", + # expected "ndarray") + return self._set_dtype(dtype) # type: ignore[return-value] if is_extension_array_dtype(dtype): return array(self, dtype=dtype, copy=copy) if is_integer_dtype(dtype) and self.isna().any(): raise ValueError("Cannot convert float NaN to integer") - return np.array(self, dtype=dtype, copy=copy) + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") + return np.array(self, dtype=dtype, copy=copy) # type: ignore[return-value] @cache_readonly def itemsize(self) -> int: diff --git a/pandas/core/arrays/floating.py b/pandas/core/arrays/floating.py index bbb5467d42d53..791d86df24204 100644 --- a/pandas/core/arrays/floating.py +++ b/pandas/core/arrays/floating.py @@ -371,7 +371,10 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: # if the 
dtype is exactly the same, we can fastpath if self.dtype == dtype: # return the same object for copy=False - return self.copy() if copy else self + + # error: Incompatible return value type (got "FloatingArray", + # expected "ndarray") + return self.copy() if copy else self # type: ignore[return-value] # if we are astyping to another nullable masked dtype, we can fastpath if isinstance(dtype, BaseMaskedDtype): # TODO deal with NaNs @@ -379,7 +382,11 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: # mask is copied depending on whether the data was copied, and # not directly depending on the `copy` keyword mask = self._mask if data is self._data else self._mask.copy() - return dtype.construct_array_type()(data, mask, copy=False) + # error: Incompatible return value type (got "BaseMaskedArray", + # expected "ndarray") + return dtype.construct_array_type()( # type: ignore[return-value] + data, mask, copy=False + ) elif isinstance(dtype, StringDtype): return StringArray._from_sequence(self, copy=False) @@ -388,11 +395,15 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: # In astype, we consider dtype=float to also mean na_value=np.nan kwargs = dict(na_value=np.nan) elif is_datetime64_dtype(dtype): - kwargs = dict(na_value=np.datetime64("NaT")) + # error: Dict entry 0 has incompatible type "str": "datetime64"; + # expected "str": "float" + kwargs = dict(na_value=np.datetime64("NaT")) # type: ignore[dict-item] else: kwargs = {} - data = self.to_numpy(dtype=dtype, **kwargs) + # error: Argument 2 to "to_numpy" of "BaseMaskedArray" has incompatible + # type "**Dict[str, float]"; expected "bool" + data = self.to_numpy(dtype=dtype, **kwargs) # type: ignore[arg-type] return astype_nansafe(data, dtype, copy=False) def _values_for_argsort(self) -> np.ndarray: diff --git a/pandas/core/arrays/integer.py b/pandas/core/arrays/integer.py index af521a8efacc7..ebfad74dcc702 100644 --- a/pandas/core/arrays/integer.py +++ b/pandas/core/arrays/integer.py @@ -471,7 +471,11 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: else: na_value = lib.no_default - return self.to_numpy(dtype=dtype, na_value=na_value, copy=False) + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") + return self.to_numpy( # type: ignore[return-value] + dtype=dtype, na_value=na_value, copy=False + ) def _values_for_argsort(self) -> np.ndarray: """ diff --git a/pandas/core/indexes/datetimes.py b/pandas/core/indexes/datetimes.py index 06405995f7685..789a44a5f06be 100644 --- a/pandas/core/indexes/datetimes.py +++ b/pandas/core/indexes/datetimes.py @@ -336,7 +336,11 @@ def _is_dates_only(self) -> bool: """ from pandas.io.formats.format import is_dates_only - return self.tz is None and is_dates_only(self._values) + # error: Argument 1 to "is_dates_only" has incompatible type + # "Union[ExtensionArray, ndarray]"; expected "Union[ndarray, + # DatetimeArray, Index, DatetimeIndex]" + return self.tz is None and is_dates_only( # type: ignore[arg-type] + self._values) def __reduce__(self): diff --git a/pandas/core/indexing.py b/pandas/core/indexing.py index 7b4b779e80481..c1c401134728b 100644 --- a/pandas/core/indexing.py +++ b/pandas/core/indexing.py @@ -2384,6 +2384,8 @@ def maybe_numeric_slice(df, slice_, include_bool: bool = False): if slice_ is None: dtypes = [np.number] if include_bool: - dtypes.append(bool) + # error: Argument 1 to "append" of "list" has incompatible type + # "Type[bool]"; expected "Type[number]" + dtypes.append(bool) # type: ignore[arg-type] slice_ = IndexSlice[:, 
df.select_dtypes(include=dtypes).columns] return slice_ diff --git a/pandas/core/internals/concat.py b/pandas/core/internals/concat.py index f5d0c921e1006..98e50fbe13d56 100644 --- a/pandas/core/internals/concat.py +++ b/pandas/core/internals/concat.py @@ -441,7 +441,12 @@ def _get_empty_dtype_and_na(join_units): return np.dtype("m8[ns]"), np.timedelta64("NaT", "ns") else: # pragma try: - g = np.find_common_type(upcast_classes, []) + # error: Argument 1 to "find_common_type" has incompatible type + # "Dict[str, List[Union[dtype, ExtensionDtype]]]"; expected + # "Sequence[Union[dtype, None, type, _SupportsDtype, str, + # Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DtypeDict, Tuple[Any, Any]]]" + g = np.find_common_type(upcast_classes, []) # type: ignore[arg-type] except TypeError: # At least one is an ExtensionArray return np.dtype(np.object_), np.nan diff --git a/pandas/core/internals/construction.py b/pandas/core/internals/construction.py index 6244f1bf0a2d2..8cb59c59cf84f 100644 --- a/pandas/core/internals/construction.py +++ b/pandas/core/internals/construction.py @@ -261,12 +261,19 @@ def init_dict(data: Dict, index, columns, dtype: Optional[DtypeObj] = None): if missing.any() and not is_integer_dtype(dtype): if dtype is None or ( not is_extension_array_dtype(dtype) - and np.issubdtype(dtype, np.flexible) + # error: Argument 1 to "issubdtype" has incompatible type + # "Union[dtype, ExtensionDtype]"; expected "Union[dtype, None, + # type, _SupportsDtype, str, Tuple[Any, int], Tuple[Any, + # Union[int, Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, + # Any]]" + and np.issubdtype(dtype, np.flexible) # type: ignore[arg-type] ): # GH#1783 nan_dtype = np.dtype(object) else: - nan_dtype = dtype + # error: Incompatible types in assignment (expression has type + # "Union[dtype, ExtensionDtype]", variable has type "dtype") + nan_dtype = dtype # type: ignore[assignment] val = construct_1d_arraylike_from_scalar(np.nan, len(index), nan_dtype) arrays.loc[missing] = [val] * missing.sum() @@ -414,7 +421,9 @@ def extract_index(data) -> Index: else: index = ibase.default_index(lengths[0]) - return ensure_index(index) + # error: Value of type variable "AnyArrayLike" of "ensure_index" cannot be + # "Optional[Index]" + return ensure_index(index) # type: ignore[type-var] def reorder_arrays(arrays, arr_columns, columns): @@ -600,12 +609,14 @@ def _list_of_series_to_arrays( values = np.vstack(aligned_values) if values.dtype == np.object_: - content = list(values.T) + # error: "ExtensionArray" has no attribute "T" + content = list(values.T) # type: ignore[attr-defined] columns = _validate_or_indexify_columns(content, columns) content = _convert_object_array(content, dtype=dtype, coerce_float=coerce_float) return content, columns else: - return values.T, columns + # error: "ExtensionArray" has no attribute "T" + return values.T, columns # type: ignore[attr-defined] def _list_of_dict_to_arrays( diff --git a/pandas/core/window/ewm.py b/pandas/core/window/ewm.py index 25938b57d9720..d2c456a3ad498 100644 --- a/pandas/core/window/ewm.py +++ b/pandas/core/window/ewm.py @@ -219,13 +219,23 @@ def __init__( times = self._selected_obj[times] if not is_datetime64_ns_dtype(times): raise ValueError("times must be datetime64[ns] dtype.") - if len(times) != len(obj): + # error: Argument 1 to "len" has incompatible type "Union[str, + # ndarray, FrameOrSeries, None]"; expected "Sized" + if len(times) != len(obj): # type: ignore[arg-type] raise ValueError("times must be the same length as the 
object.") if not isinstance(halflife, (str, datetime.timedelta)): raise ValueError( "halflife must be a string or datetime.timedelta object" ) - self.times = np.asarray(times.astype(np.int64)) + + # pandas\core\window\ewm.py:228: error: Item "str" of "Union[str, + # ndarray, FrameOrSeries, None]" has no attribute "astype" + # [union-attr] + + # pandas\core\window\ewm.py:228: error: Item "None" of "Union[str, + # ndarray, FrameOrSeries, None]" has no attribute "astype" + # [union-attr] + self.times = np.asarray(times.astype(np.int64)) # type: ignore[union-attr] self.halflife = Timedelta(halflife).value # Halflife is no longer applicable when calculating COM # But allow COM to still be calculated if the user passes other decay args @@ -241,7 +251,12 @@ def __init__( ) self.times = None self.halflife = None - self.com = get_center_of_mass(com, span, halflife, alpha) + # error: Argument 3 to "get_center_of_mass" has incompatible type + # "Union[float, Any, None, timedelta64, int64]"; expected + # "Optional[float]" + self.com = get_center_of_mass( # type: ignore[arg-type] + com, span, halflife, alpha + ) @property def _constructor(self): @@ -415,8 +430,17 @@ def _get_cov(X, Y): ) return X._wrap_result(cov) + # pandas\core\window\ewm.py:419: error: Item "ndarray" of + # "Union[ndarray, FrameOrSeries, None]" has no attribute + # "_selected_obj" [union-attr] + + # pandas\core\window\ewm.py:419: error: Item "None" of "Union[ndarray, + # FrameOrSeries, None]" has no attribute "_selected_obj" [union-attr] return flex_binary_moment( - self._selected_obj, other._selected_obj, _get_cov, pairwise=bool(pairwise) + self._selected_obj, # type: ignore[union-attr] + other._selected_obj, + _get_cov, + pairwise=bool(pairwise), ) @Substitution(name="ewm", func_name="corr") diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index cd99ae8a40fc9..fa9b77e77e191 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -376,7 +376,8 @@ def _prep_values(self, values: Optional[np.ndarray] = None) -> np.ndarray: elif needs_i8_conversion(values.dtype): # type: ignore[union-attr] raise NotImplementedError( f"ops for {self._window_type} for this " - f"dtype {values.dtype} are not implemented" + # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" + f"dtype {values.dtype} are not implemented" # type: ignore[union-attr] ) else: try: @@ -389,7 +390,9 @@ def _prep_values(self, values: Optional[np.ndarray] = None) -> np.ndarray: if inf.any(): values = np.where(inf, np.nan, values) - return values + # error: Incompatible return value type (got "Optional[ndarray]", + # expected "ndarray") + return values # type: ignore[return-value] def _wrap_result(self, result: np.ndarray) -> "Series": """ @@ -1128,7 +1131,13 @@ def _pop_args(win_type, arg_names, kwargs): return _validate_win_type(self.win_type, kwargs) - def _get_window( + # pandas\core\window\rolling.py:1131: error: Missing return statement + # [return] + + # pandas\core\window\rolling.py:1131: error: Return type "ndarray" of + # "_get_window" incompatible with return type "int" in supertype + # "BaseWindow" [override] + def _get_window( # type: ignore[return,override] self, other=None, win_type: Optional[Union[str, Tuple]] = None ) -> np.ndarray: """ diff --git a/pandas/io/formats/format.py b/pandas/io/formats/format.py index b9d41f142c2b5..727ddcd5b3a68 100644 --- a/pandas/io/formats/format.py +++ b/pandas/io/formats/format.py @@ -1587,7 +1587,9 @@ def _format_strings(self) -> List[str]: if 
is_categorical_dtype(values.dtype): # Categorical is special for now, so that we can preserve tzinfo - array = values._internal_get_values() + + # error: "ExtensionArray" has no attribute "_internal_get_values" + array = values._internal_get_values() # type: ignore[attr-defined] else: array = np.asarray(values) @@ -1646,8 +1648,26 @@ def format_percentiles( with np.errstate(invalid="ignore"): if ( not is_numeric_dtype(percentiles) - or not np.all(percentiles >= 0) - or not np.all(percentiles <= 1) + + # pandas\io\formats\format.py:1649: error: Unsupported operand + # types for >= ("List[Union[int, float]]" and "int") [operator] + + # pandas\io\formats\format.py:1649: error: Unsupported operand + # types for >= ("List[float]" and "int") [operator] + + # pandas\io\formats\format.py:1649: error: Unsupported operand + # types for >= ("List[Union[str, float]]" and "int") [operator] + or not np.all(percentiles >= 0) # type: ignore[operator] + + # pandas\io\formats\format.py:1650: error: Unsupported operand + # types for <= ("List[Union[int, float]]" and "int") [operator] + + # pandas\io\formats\format.py:1650: error: Unsupported operand + # types for <= ("List[float]" and "int") [operator] + + # pandas\io\formats\format.py:1650: error: Unsupported operand + # types for <= ("List[Union[str, float]]" and "int") [operator] + or not np.all(percentiles <= 1) # type: ignore[operator] ): raise ValueError("percentiles should all be in the interval [0,1]") From 003bc917a8001b5c03dbe2ddc3316a891d8d16c6 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 16:04:58 +0100 Subject: [PATCH 16/86] black (urgh.. deletes line breaks between comments) --- pandas/core/indexes/datetimes.py | 3 +-- pandas/io/formats/format.py | 6 ------ 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/pandas/core/indexes/datetimes.py b/pandas/core/indexes/datetimes.py index 789a44a5f06be..6934c30e9c3b8 100644 --- a/pandas/core/indexes/datetimes.py +++ b/pandas/core/indexes/datetimes.py @@ -339,8 +339,7 @@ def _is_dates_only(self) -> bool: # error: Argument 1 to "is_dates_only" has incompatible type # "Union[ExtensionArray, ndarray]"; expected "Union[ndarray, # DatetimeArray, Index, DatetimeIndex]" - return self.tz is None and is_dates_only( # type: ignore[arg-type] - self._values) + return self.tz is None and is_dates_only(self._values) # type: ignore[arg-type] def __reduce__(self): diff --git a/pandas/io/formats/format.py b/pandas/io/formats/format.py index 727ddcd5b3a68..1cf89f56692e5 100644 --- a/pandas/io/formats/format.py +++ b/pandas/io/formats/format.py @@ -1648,23 +1648,17 @@ def format_percentiles( with np.errstate(invalid="ignore"): if ( not is_numeric_dtype(percentiles) - # pandas\io\formats\format.py:1649: error: Unsupported operand # types for >= ("List[Union[int, float]]" and "int") [operator] - # pandas\io\formats\format.py:1649: error: Unsupported operand # types for >= ("List[float]" and "int") [operator] - # pandas\io\formats\format.py:1649: error: Unsupported operand # types for >= ("List[Union[str, float]]" and "int") [operator] or not np.all(percentiles >= 0) # type: ignore[operator] - # pandas\io\formats\format.py:1650: error: Unsupported operand # types for <= ("List[Union[int, float]]" and "int") [operator] - # pandas\io\formats\format.py:1650: error: Unsupported operand # types for <= ("List[float]" and "int") [operator] - # pandas\io\formats\format.py:1650: error: Unsupported operand # types for <= ("List[Union[str, float]]" and "int") [operator] or not np.all(percentiles <= 1) # 
type: ignore[operator] From e449a10fb7c6be219a735ce8cb8769e0f9d9b0ef Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 16:41:17 +0100 Subject: [PATCH 17/86] wip --- pandas/core/arrays/integer.py | 13 +++++++++++-- pandas/core/arrays/interval.py | 9 +++++++-- pandas/core/arrays/masked.py | 14 +++++++++++--- pandas/core/arrays/timedeltas.py | 10 ++++++++-- pandas/core/frame.py | 27 +++++++++++++++++++++++---- pandas/core/generic.py | 3 ++- pandas/core/internals/blocks.py | 4 ++-- pandas/core/internals/concat.py | 4 +++- pandas/core/reshape/melt.py | 4 +++- pandas/core/window/ewm.py | 8 ++++---- pandas/core/window/rolling.py | 4 ++-- pandas/io/formats/format.py | 16 +++++++++++++--- 12 files changed, 89 insertions(+), 27 deletions(-) diff --git a/pandas/core/arrays/integer.py b/pandas/core/arrays/integer.py index ebfad74dcc702..a60a32fcd73f4 100644 --- a/pandas/core/arrays/integer.py +++ b/pandas/core/arrays/integer.py @@ -84,7 +84,14 @@ def _get_common_dtype(self, dtypes: List[DtypeObj]) -> Optional[DtypeObj]: ): return None np_dtype = np.find_common_type( - [t.numpy_dtype if isinstance(t, BaseMaskedDtype) else t for t in dtypes], [] + # error: List comprehension has incompatible type List[Union[Any, + # dtype, ExtensionDtype]]; expected List[Union[dtype, None, type, + # _SupportsDtype, str, Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DtypeDict, Tuple[Any, Any]]] + [ # type: ignore[misc] + t.numpy_dtype if isinstance(t, BaseMaskedDtype) else t for t in dtypes + ], + [], ) if np.issubdtype(np_dtype, np.integer): return INT_STR_TO_DTYPE[str(np_dtype)] @@ -356,7 +363,9 @@ def __pos__(self): return self def __abs__(self): - return type(self)(np.abs(self._data), self._mask) + # error: Argument 1 to "IntegerArray" has incompatible type + # "Union[ndarray, generic]"; expected "ndarray" + return type(self)(np.abs(self._data), self._mask) # type: ignore[arg-type] @classmethod def _from_sequence(cls, scalars, dtype=None, copy: bool = False) -> "IntegerArray": diff --git a/pandas/core/arrays/interval.py b/pandas/core/arrays/interval.py index 413430942575d..a5a3448ff89b8 100644 --- a/pandas/core/arrays/interval.py +++ b/pandas/core/arrays/interval.py @@ -554,7 +554,10 @@ def __getitem__(self, value): if is_scalar(left) and isna(left): return self._fill_value return Interval(left, right, self.closed) - if np.ndim(left) > 1: + # error: Argument 1 to "ndim" has incompatible type "Union[ndarray, + # ExtensionArray]"; expected "Union[bool, int, float, complex, + # _SupportsArray, Sequence[Any]]" + if np.ndim(left) > 1: # type: ignore[arg-type] # GH#30588 multi-dimensional indexer disallowed raise ValueError("multi-dimensional indexing not allowed") return self._shallow_copy(left, right) @@ -742,7 +745,9 @@ def copy(self): # TODO: Could skip verify_integrity here. return type(self).from_arrays(left, right, closed=closed) - def isna(self) -> np.ndarray: + # error: Return type "ndarray" of "isna" incompatible with return type + # "ArrayLike" in supertype "ExtensionArray" + def isna(self) -> np.ndarray: # type: ignore[override] return isna(self._left) def shift(self, periods: int = 1, fill_value: object = None) -> "IntervalArray": diff --git a/pandas/core/arrays/masked.py b/pandas/core/arrays/masked.py index 97ade0dc70843..175f396707bc8 100644 --- a/pandas/core/arrays/masked.py +++ b/pandas/core/arrays/masked.py @@ -247,7 +247,9 @@ def _hasna(self) -> bool: # source code using it.. 
return self._mask.any() - def isna(self) -> np.ndarray: + # error: Return type "ndarray" of "isna" incompatible with return type + # "ArrayLike" in supertype "ExtensionArray" + def isna(self) -> np.ndarray: # type: ignore[override] return self._mask @property @@ -305,8 +307,14 @@ def factorize(self, na_sentinel: int = -1) -> Tuple[np.ndarray, ExtensionArray]: # the hashtables don't handle all different types of bits uniques = uniques.astype(self.dtype.numpy_dtype, copy=False) - uniques = type(self)(uniques, np.zeros(len(uniques), dtype=bool)) - return codes, uniques + # error: Incompatible types in assignment (expression has type + # "BaseMaskedArray", variable has type "ndarray") + uniques = type(self)( # type: ignore[assignment] + uniques, np.zeros(len(uniques), dtype=bool) + ) + # error: Incompatible return value type (got "Tuple[ndarray, ndarray]", + # expected "Tuple[ndarray, ExtensionArray]") + return codes, uniques # type: ignore[return-value] def value_counts(self, dropna: bool = True) -> "Series": """ diff --git a/pandas/core/arrays/timedeltas.py b/pandas/core/arrays/timedeltas.py index c97c7da375fd4..6cdb3253bdbdf 100644 --- a/pandas/core/arrays/timedeltas.py +++ b/pandas/core/arrays/timedeltas.py @@ -300,7 +300,9 @@ def _generate_range( @classmethod def _rebox_native(cls, value: int) -> np.timedelta64: - return np.int64(value).view("m8[ns]") + # error: Incompatible return value type (got "int64", expected + # "timedelta64") + return np.int64(value).view("m8[ns]") # type: ignore[return-value] def _unbox_scalar(self, value, setitem: bool = False): if not isinstance(value, self._scalar_type) and value is not NaT: @@ -718,7 +720,11 @@ def __rfloordiv__(self, other): return result elif is_object_dtype(other.dtype): - result = [other[n] // self[n] for n in range(len(self))] + # error: Incompatible types in assignment (expression has type + # "List[Any]", variable has type "ndarray") + result = [ # type: ignore[assignment] + other[n] // self[n] for n in range(len(self)) + ] result = np.array(result) return result diff --git a/pandas/core/frame.py b/pandas/core/frame.py index 1f9987d9d3f5b..74827948fd8a3 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -457,13 +457,22 @@ def __init__( ) elif isinstance(data, dict): - mgr = init_dict(data, index, columns, dtype=dtype) + # error: Argument "dtype" to "init_dict" has incompatible type + # "Union[ExtensionDtype, str, dtype, Type[object], None]"; expected + # "Union[dtype, ExtensionDtype, None]" + mgr = init_dict(data, index, columns, dtype=dtype) # type: ignore[arg-type] elif isinstance(data, ma.MaskedArray): import numpy.ma.mrecords as mrecords # masked recarray if isinstance(data, mrecords.MaskedRecords): - mgr = masked_rec_array_to_mgr(data, index, columns, dtype, copy) + # error: Argument 4 to "masked_rec_array_to_mgr" has + # incompatible type "Union[ExtensionDtype, str, dtype, + # Type[object], None]"; expected "Union[dtype, ExtensionDtype, + # None]" + mgr = masked_rec_array_to_mgr( # type: ignore[arg-type] + data, index, columns, dtype, copy + ) # a masked array else: @@ -474,7 +483,12 @@ def __init__( data[mask] = fill_value else: data = data.copy() - mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) + # error: Argument "dtype" to "init_ndarray" has incompatible + # type "Union[ExtensionDtype, str, dtype, Type[object], None]"; + # expected "Union[dtype, ExtensionDtype, None]" + mgr = init_ndarray( # type: ignore[arg-type] + data, index, columns, dtype=dtype, copy=copy + ) elif isinstance(data, 
(np.ndarray, Series, Index)): if data.dtype.names: @@ -482,7 +496,12 @@ def __init__( data = {k: data[k] for k in data_columns} if columns is None: columns = data_columns - mgr = init_dict(data, index, columns, dtype=dtype) + # error: Argument "dtype" to "init_dict" has incompatible type + # "Union[ExtensionDtype, str, dtype, Type[object], None]"; + # expected "Union[dtype, ExtensionDtype, None]" + mgr = init_dict( # type: ignore[arg-type] + data, index, columns, dtype=dtype + ) elif getattr(data, "name", None) is not None: mgr = init_dict({data.name: data}, index, columns, dtype=dtype) else: diff --git a/pandas/core/generic.py b/pandas/core/generic.py index 04e1fc91c5fd4..0cc0d964ce8a5 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -616,7 +616,8 @@ def size(self) -> int: >>> df.size 4 """ - return np.prod(self.shape) + # error: Incompatible return value type (got "number", expected "int") + return np.prod(self.shape) # type: ignore[return-value] @property def _selected_obj(self: FrameOrSeries) -> FrameOrSeries: diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 5f1ddd907973f..76fbc5831533b 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -1289,8 +1289,8 @@ def func(yvalues: np.ndarray) -> np.ndarray: # error: Argument "xvalues" to "interpolate_1d" has incompatible # type "Index"; expected "ndarray" - return missing.interpolate_1d( # type: ignore[arg-type] - xvalues=index, + return missing.interpolate_1d( + xvalues=index, # type: ignore[arg-type] yvalues=yvalues, method=method, limit=limit, diff --git a/pandas/core/internals/concat.py b/pandas/core/internals/concat.py index 98e50fbe13d56..8c443987941a0 100644 --- a/pandas/core/internals/concat.py +++ b/pandas/core/internals/concat.py @@ -418,7 +418,9 @@ def _get_empty_dtype_and_na(join_units): if "extension" in upcast_classes: if len(upcast_classes) == 1: cls = upcast_classes["extension"][0] - return cls, cls.na_value + # error: Item "dtype" of "Union[dtype, ExtensionDtype]" has no + # attribute "na_value" + return cls, cls.na_value # type: ignore[union-attr] else: return np.dtype("object"), np.nan elif "object" in upcast_classes: diff --git a/pandas/core/reshape/melt.py b/pandas/core/reshape/melt.py index 83a5f43c2a340..f40322db443b9 100644 --- a/pandas/core/reshape/melt.py +++ b/pandas/core/reshape/melt.py @@ -125,7 +125,9 @@ def melt( mcolumns = id_vars + var_name + [value_name] - mdata[value_name] = frame._values.ravel("F") + # error: Incompatible types in assignment (expression has type "ndarray", + # target has type "Series") + mdata[value_name] = frame._values.ravel("F") # type: ignore[assignment] for i, col in enumerate(var_name): # asanyarray will keep the columns as an Index mdata[col] = np.asanyarray(frame.columns._get_level_values(i)).repeat(N) diff --git a/pandas/core/window/ewm.py b/pandas/core/window/ewm.py index d2c456a3ad498..622a338bda2e5 100644 --- a/pandas/core/window/ewm.py +++ b/pandas/core/window/ewm.py @@ -254,8 +254,8 @@ def __init__( # error: Argument 3 to "get_center_of_mass" has incompatible type # "Union[float, Any, None, timedelta64, int64]"; expected # "Optional[float]" - self.com = get_center_of_mass( # type: ignore[arg-type] - com, span, halflife, alpha + self.com = get_center_of_mass( + com, span, halflife, alpha # type: ignore[arg-type] ) @property @@ -437,8 +437,8 @@ def _get_cov(X, Y): # pandas\core\window\ewm.py:419: error: Item "None" of "Union[ndarray, # FrameOrSeries, None]" has no attribute "_selected_obj" 
[union-attr] return flex_binary_moment( - self._selected_obj, # type: ignore[union-attr] - other._selected_obj, + self._selected_obj, + other._selected_obj, # type: ignore[union-attr] _get_cov, pairwise=bool(pairwise), ) diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index fa9b77e77e191..ebeefc2086094 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -375,9 +375,9 @@ def _prep_values(self, values: Optional[np.ndarray] = None) -> np.ndarray: # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" elif needs_i8_conversion(values.dtype): # type: ignore[union-attr] raise NotImplementedError( - f"ops for {self._window_type} for this " # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" - f"dtype {values.dtype} are not implemented" # type: ignore[union-attr] + f"ops for {self._window_type} for this " # type: ignore[union-attr] + f"dtype {values.dtype} are not implemented" ) else: try: diff --git a/pandas/io/formats/format.py b/pandas/io/formats/format.py index 1cf89f56692e5..c33440cd71190 100644 --- a/pandas/io/formats/format.py +++ b/pandas/io/formats/format.py @@ -1680,10 +1680,20 @@ def format_percentiles( prec = -np.floor( np.log10(np.min(np.ediff1d(unique_pcts, to_begin=to_begin, to_end=to_end))) ).astype(int) - prec = max(1, prec) + # error: Incompatible types in assignment (expression has type "Union[int, + # ndarray, generic]", variable has type "Union[ndarray, generic]") + prec = max(1, prec) # type: ignore[assignment] out = np.empty_like(percentiles, dtype=object) - out[int_idx] = percentiles[int_idx].astype(int).astype(str) - out[~int_idx] = percentiles[~int_idx].round(prec).astype(str) + # error: No overload variant of "__getitem__" of "list" matches argument + # type "Union[bool_, ndarray]" + out[int_idx] = ( + percentiles[int_idx].astype(int).astype(str) # type: ignore[call-overload] + ) + # error: No overload variant of "__getitem__" of "list" matches argument + # type "Union[bool_, ndarray]" + out[~int_idx] = ( + percentiles[~int_idx].round(prec).astype(str) # type: ignore[call-overload] + ) return [i + "%" for i in out] From 0539f9fb4f0ca0546409224c1843a96d83773c23 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 19:50:41 +0100 Subject: [PATCH 18/86] wip --- pandas/core/arrays/boolean.py | 4 +- pandas/core/arrays/categorical.py | 12 +++- pandas/core/arrays/integer.py | 16 +++++- pandas/core/arrays/sparse/array.py | 4 +- pandas/core/arrays/timedeltas.py | 4 +- pandas/core/dtypes/concat.py | 14 ++++- pandas/core/frame.py | 92 +++++++++++++++++++++++------- pandas/core/generic.py | 8 ++- pandas/core/groupby/generic.py | 13 ++++- pandas/core/groupby/groupby.py | 32 ++++++++--- pandas/core/indexes/base.py | 4 +- pandas/core/strings/accessor.py | 13 ++++- pandas/core/window/rolling.py | 4 +- pandas/io/formats/format.py | 20 ++++++- 14 files changed, 192 insertions(+), 48 deletions(-) diff --git a/pandas/core/arrays/boolean.py b/pandas/core/arrays/boolean.py index 72dc4ae07def9..5a4ab995cabf9 100644 --- a/pandas/core/arrays/boolean.py +++ b/pandas/core/arrays/boolean.py @@ -613,7 +613,9 @@ def logical_method(self, other): elif op.__name__ in {"xor", "rxor"}: result, mask = ops.kleene_xor(self._data, other, self._mask, mask) - return BooleanArray(result, mask) + # error: Argument 2 to "BooleanArray" has incompatible type + # "Optional[Any]"; expected "ndarray" + return BooleanArray(result, mask) # type: ignore[arg-type] name = f"__{op.__name__}__" return 
set_function_name(logical_method, name, cls) diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py index 46cd09fa039d8..17d94c72eafce 100644 --- a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -417,9 +417,19 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: return array(self, dtype=dtype, copy=copy) if is_integer_dtype(dtype) and self.isna().any(): raise ValueError("Cannot convert float NaN to integer") + # error: Incompatible return value type (got "ndarray", expected # "ExtensionArray") - return np.array(self, dtype=dtype, copy=copy) # type: ignore[return-value] + return np.array( # type: ignore[return-value] + # error: Argument "dtype" to "array" has incompatible type + # "Union[ExtensionDtype, str, dtype, Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object]]"; expected "Union[dtype, + # None, type, _SupportsDtype, str, Tuple[Any, int], Tuple[Any, + # Union[int, Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, Any]]" + self, + dtype=dtype, # type: ignore[arg-type] + copy=copy, + ) @cache_readonly def itemsize(self) -> int: diff --git a/pandas/core/arrays/integer.py b/pandas/core/arrays/integer.py index a60a32fcd73f4..892f0854f64ef 100644 --- a/pandas/core/arrays/integer.py +++ b/pandas/core/arrays/integer.py @@ -460,14 +460,22 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: # if the dtype is exactly the same, we can fastpath if self.dtype == dtype: # return the same object for copy=False - return self.copy() if copy else self + + # error: Incompatible return value type (got "IntegerArray", + # expected "ndarray") + return self.copy() if copy else self # type: ignore[return-value] # if we are astyping to another nullable masked dtype, we can fastpath if isinstance(dtype, BaseMaskedDtype): data = self._data.astype(dtype.numpy_dtype, copy=copy) # mask is copied depending on whether the data was copied, and # not directly depending on the `copy` keyword mask = self._mask if data is self._data else self._mask.copy() - return dtype.construct_array_type()(data, mask, copy=False) + + # error: Incompatible return value type (got "BaseMaskedArray", + # expected "ndarray") + return dtype.construct_array_type()( # type: ignore[return-value] + data, mask, copy=False + ) elif isinstance(dtype, StringDtype): return dtype.construct_array_type()._from_sequence(self, copy=False) @@ -476,7 +484,9 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: # In astype, we consider dtype=float to also mean na_value=np.nan na_value = np.nan elif is_datetime64_dtype(dtype): - na_value = np.datetime64("NaT") + # error: Incompatible types in assignment (expression has type + # "datetime64", variable has type "float") + na_value = np.datetime64("NaT") # type: ignore[assignment] else: na_value = lib.no_default diff --git a/pandas/core/arrays/sparse/array.py b/pandas/core/arrays/sparse/array.py index d4ec641794fc2..149cdc82413f4 100644 --- a/pandas/core/arrays/sparse/array.py +++ b/pandas/core/arrays/sparse/array.py @@ -834,7 +834,9 @@ def take(self, indices, allow_fill=False, fill_value=None) -> "SparseArray": result = self._take_with_fill(indices, fill_value=fill_value) kwargs = {} else: - result = self._take_without_fill(indices) + # error: Incompatible types in assignment (expression has type + # "Union[ndarray, SparseArray]", variable has type "ndarray") + result = self._take_without_fill(indices) # type: ignore[assignment] kwargs = {"dtype": self.dtype} return type(self)(result, 
fill_value=self.fill_value, kind=self.kind, **kwargs) diff --git a/pandas/core/arrays/timedeltas.py b/pandas/core/arrays/timedeltas.py index 6cdb3253bdbdf..19107c4b1b8f1 100644 --- a/pandas/core/arrays/timedeltas.py +++ b/pandas/core/arrays/timedeltas.py @@ -122,7 +122,9 @@ def _box_func(self, x) -> Union[Timedelta, NaTType]: return Timedelta(x, unit="ns") @property - def dtype(self) -> np.dtype: + # error: Return type "dtype" of "dtype" incompatible with return type + # "ExtensionDtype" in supertype "ExtensionArray" + def dtype(self) -> np.dtype: # type: ignore[override] """ The dtype for the TimedeltaArray. diff --git a/pandas/core/dtypes/concat.py b/pandas/core/dtypes/concat.py index 60fd959701821..71a0ecddb4754 100644 --- a/pandas/core/dtypes/concat.py +++ b/pandas/core/dtypes/concat.py @@ -78,8 +78,12 @@ def _cast_to_common_type(arr: ArrayLike, dtype: DtypeObj) -> ArrayLike: # problem case: SparseArray.astype(dtype) doesn't follow the specified # dtype exactly, but converts this to Sparse[dtype] -> first manually # convert to dense array - arr = cast(SparseArray, arr) - return arr.to_dense().astype(dtype, copy=False) + + # error: Incompatible types in assignment (expression has type + # "SparseArray", variable has type "ndarray") + arr = cast(SparseArray, arr) # type: ignore[assignment] + # error: "ndarray" has no attribute "to_dense" + return arr.to_dense().astype(dtype, copy=False) # type: ignore[attr-defined] if ( isinstance(arr, np.ndarray) @@ -95,7 +99,11 @@ def _cast_to_common_type(arr: ArrayLike, dtype: DtypeObj) -> ArrayLike: if isinstance(arr, np.ndarray): # numpy's astype cannot handle ExtensionDtypes return array(arr, dtype=dtype, copy=False) - return arr.astype(dtype, copy=False) + # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has incompatible + # type "Union[dtype, ExtensionDtype]"; expected "Union[dtype, None, type, + # _SupportsDtype, str, Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, Any]]" + return arr.astype(dtype, copy=False) # type: ignore[arg-type] def concat_compat(to_concat, axis: int = 0): diff --git a/pandas/core/frame.py b/pandas/core/frame.py index 74827948fd8a3..a8a0d86094366 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -470,8 +470,8 @@ def __init__( # incompatible type "Union[ExtensionDtype, str, dtype, # Type[object], None]"; expected "Union[dtype, ExtensionDtype, # None]" - mgr = masked_rec_array_to_mgr( # type: ignore[arg-type] - data, index, columns, dtype, copy + mgr = masked_rec_array_to_mgr( + data, index, columns, dtype, copy # type: ignore[arg-type] ) # a masked array @@ -486,8 +486,12 @@ def __init__( # error: Argument "dtype" to "init_ndarray" has incompatible # type "Union[ExtensionDtype, str, dtype, Type[object], None]"; # expected "Union[dtype, ExtensionDtype, None]" - mgr = init_ndarray( # type: ignore[arg-type] - data, index, columns, dtype=dtype, copy=copy + mgr = init_ndarray( + data, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + copy=copy, ) elif isinstance(data, (np.ndarray, Series, Index)): @@ -499,8 +503,8 @@ def __init__( # error: Argument "dtype" to "init_dict" has incompatible type # "Union[ExtensionDtype, str, dtype, Type[object], None]"; # expected "Union[dtype, ExtensionDtype, None]" - mgr = init_dict( # type: ignore[arg-type] - data, index, columns, dtype=dtype + mgr = init_dict( + data, index, columns, dtype=dtype # type: ignore[arg-type] ) elif getattr(data, "name", None) is not None: mgr = init_dict({data.name: data}, index, 
columns, dtype=dtype) @@ -531,9 +535,20 @@ def __init__( mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype) else: - mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) + # error: Argument "dtype" to "init_ndarray" has + # incompatible type "Union[ExtensionDtype, str, dtype, + # Type[object], None]"; expected "Union[dtype, + # ExtensionDtype, None]" + mgr = init_ndarray( # type: ignore[arg-type] + data, index, columns, dtype=dtype, copy=copy + ) else: - mgr = init_dict({}, index, columns, dtype=dtype) + # error: Argument "dtype" to "init_dict" has incompatible type + # "Union[ExtensionDtype, str, dtype, Type[object], None]"; + # expected "Union[dtype, ExtensionDtype, None]" + mgr = init_dict( # type: ignore[arg-type] + {}, index, columns, dtype=dtype + ) # For data is scalar else: if index is None or columns is None: @@ -546,14 +561,26 @@ def __init__( if is_extension_array_dtype(dtype): values = [ - construct_1d_arraylike_from_scalar(data, len(index), dtype) + # error: Argument 3 to "construct_1d_arraylike_from_scalar" + # has incompatible type "Union[ExtensionDtype, str, dtype, + # Type[object]]"; expected "Union[dtype, ExtensionDtype]" + construct_1d_arraylike_from_scalar( # type: ignore[arg-type] + data, len(index), dtype + ) for _ in range(len(columns)) ] mgr = arrays_to_mgr(values, columns, index, columns, dtype=None) else: # Attempt to coerce to a numpy array try: - arr = np.array(data, dtype=dtype, copy=copy) + # error: Argument "dtype" to "array" has incompatible type + # "Union[ExtensionDtype, str, dtype, Type[object]]"; + # expected "Union[dtype, None, type, _SupportsDtype, str, + # Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DtypeDict, Tuple[Any, Any]]" + arr = np.array( # type: ignore[arg-type] + data, dtype=dtype, copy=copy + ) except (ValueError, TypeError) as err: exc = TypeError( "DataFrame constructor called with " @@ -564,12 +591,19 @@ def __init__( if arr.ndim != 0: raise ValueError("DataFrame constructor not properly called!") - values = cast_scalar_to_array( + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "List[ExtensionArray]") + values = cast_scalar_to_array( # type: ignore[assignment] (len(index), len(columns)), data, dtype=dtype ) mgr = init_ndarray( - values, index, columns, dtype=values.dtype, copy=False + # error: "List[ExtensionArray]" has no attribute "dtype" + values, + index, + columns, + dtype=values.dtype, # type: ignore[attr-defined] + copy=False, ) NDFrame.__init__(self, mgr) @@ -2009,13 +2043,18 @@ def to_records( if dtype_mapping is None: formats.append(v.dtype) elif isinstance(dtype_mapping, (type, np.dtype, str)): - formats.append(dtype_mapping) + # error: Argument 1 to "append" of "list" has incompatible type + # "Union[type, dtype, str]"; expected "dtype" + formats.append(dtype_mapping) # type: ignore[arg-type] else: element = "row" if i < index_len else "column" msg = f"Invalid dtype {dtype_mapping} specified for {element} {name}" raise ValueError(msg) - return np.rec.fromarrays(arrays, dtype={"names": names, "formats": formats}) + # error: Module has no attribute "fromarrays" + return np.rec.fromarrays( # type: ignore[attr-defined] + arrays, dtype={"names": names, "formats": formats} + ) @classmethod def _from_arrays( @@ -2839,7 +2878,9 @@ def transpose(self, *args, copy: bool = False) -> DataFrame: ) else: - new_values = self.values.T + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type 
"List[Any]") + new_values = self.values.T # type: ignore[assignment] if copy: new_values = new_values.copy() result = self._constructor( @@ -2898,7 +2939,9 @@ def _get_column_array(self, i: int) -> ArrayLike: Get the values of the i'th column (ndarray or ExtensionArray, as stored in the Block) """ - return self._mgr.iget_values(i) + # error: Incompatible return value type (got "ExtensionArray", expected + # "ndarray") + return self._mgr.iget_values(i) # type: ignore[return-value] def _iter_column_arrays(self) -> Iterator[ArrayLike]: """ @@ -2906,7 +2949,9 @@ def _iter_column_arrays(self) -> Iterator[ArrayLike]: This returns the values as stored in the Block (ndarray or ExtensionArray). """ for i in range(len(self.columns)): - yield self._get_column_array(i) + # error: Incompatible types in "yield" (actual type + # "ExtensionArray", expected type "ndarray") + yield self._get_column_array(i) # type: ignore[misc] def __getitem__(self, key): key = lib.item_from_zerodim(key) @@ -3650,7 +3695,8 @@ def extract_unique_dtypes_from_dtypes_set( ) keep_these &= ~self.dtypes.isin(excluded_dtypes) - return self.iloc[:, keep_these.values] + # error: "ndarray" has no attribute "values" + return self.iloc[:, keep_these.values] # type: ignore[attr-defined] def insert(self, loc, column, value, allow_duplicates=False) -> None: """ @@ -5348,7 +5394,11 @@ def sort_values( # type: ignore[override] # need to rewrap columns in Series to apply key function if key is not None: - keys = [Series(k, name=name) for (k, name) in zip(keys, by)] + # error: List comprehension has incompatible type List[Series]; + # expected List[ndarray] + keys = [ # type: ignore[misc] + Series(k, name=name) for (k, name) in zip(keys, by) + ] indexer = lexsort_indexer( keys, orders=ascending, na_position=na_position, key=key @@ -5362,7 +5412,9 @@ def sort_values( # type: ignore[override] # need to rewrap column in Series to apply key function if key is not None: - k = Series(k, name=by) + # error: Incompatible types in assignment (expression has type + # "Series", variable has type "ndarray") + k = Series(k, name=by) # type[assignment] if isinstance(ascending, (tuple, list)): ascending = ascending[0] diff --git a/pandas/core/generic.py b/pandas/core/generic.py index 0cc0d964ce8a5..d26a8b1663336 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -1909,7 +1909,9 @@ def __array_wrap__( # ptp also requires the item_from_zerodim return result d = self._construct_axes_dict(self._AXIS_ORDERS, copy=False) - return self._constructor(result, **d).__finalize__( + # error: Argument 1 to "NDFrame" has incompatible type "ndarray"; + # expected "BlockManager" + return self._constructor(result, **d).__finalize__( # type: ignore[arg-type] self, method="__array_wrap__" ) @@ -9856,7 +9858,9 @@ def abs(self: FrameOrSeries) -> FrameOrSeries: 2 6 30 -30 3 7 40 -50 """ - return np.abs(self) + # error: Incompatible return value type (got "Union[ndarray, generic]", + # expected "FrameOrSeries") + return np.abs(self) # type: ignore[return-value] def describe( self: FrameOrSeries, diff --git a/pandas/core/groupby/generic.py b/pandas/core/groupby/generic.py index e7e812737d48e..22da85a5f1557 100644 --- a/pandas/core/groupby/generic.py +++ b/pandas/core/groupby/generic.py @@ -319,7 +319,12 @@ def _aggregate_multiple_funcs(self, arg): # let higher level handle return results - output = self._wrap_aggregated_output(results, index=None) + # Argument 1 to "_wrap_aggregated_output" of "SeriesGroupBy" has + # incompatible type "Dict[OutputKey, 
Union[DataFrame, Series]]"; + # expected "Mapping[OutputKey, Union[Series, ndarray]]" + output = self._wrap_aggregated_output( # type: ignore[arg-type] + results, index=None + ) return self.obj._constructor_expanddim(output, columns=columns) # TODO: index should not be Optional - see GH 35490 @@ -711,8 +716,10 @@ def value_counts( # lab is a Categorical with categories an IntervalIndex lab = cut(Series(val), bins, include_lowest=True) - lev = lab.cat.categories - lab = lev.take(lab.cat.codes) + # error: "ndarray" has no attribute "cat" + lev = lab.cat.categories # type: ignore[attr-defined] + # error: "ndarray" has no attribute "cat" + lab = lev.take(lab.cat.codes) # type: ignore[attr-defined] llab = lambda lab, inc: lab[inc]._multiindex.codes[-1] if is_interval_dtype(lab.dtype): diff --git a/pandas/core/groupby/groupby.py b/pandas/core/groupby/groupby.py index 887f50f8dbcd5..99605627a2c71 100644 --- a/pandas/core/groupby/groupby.py +++ b/pandas/core/groupby/groupby.py @@ -1052,7 +1052,12 @@ def _cython_agg_general( if len(output) == 0: raise DataError("No numeric types to aggregate") - return self._wrap_aggregated_output(output, index=self.grouper.result_index) + # error: Argument 1 to "_wrap_aggregated_output" of "BaseGroupBy" has + # incompatible type "Dict[OutputKey, Union[ndarray, DatetimeArray]]"; + # expected "Mapping[OutputKey, ndarray]" + return self._wrap_aggregated_output( # type: ignore[arg-type] + output, index=self.grouper.result_index + ) def _transform_with_numba(self, data, func, *args, engine_kwargs=None, **kwargs): """ @@ -1563,9 +1568,14 @@ def sem(self, ddof: int = 1): ) # TODO(GH-22046) - setting with iloc broken if labels are not unique # .values to remove labels - result.iloc[:, cols] = ( - result.iloc[:, cols].values / np.sqrt(self.count().iloc[:, cols]).values - ) + + # pandas\core\groupby\groupby.py:1567: error: Item "ndarray" of + # "Union[ndarray, generic]" has no attribute "values" [union-attr] + + # pandas\core\groupby\groupby.py:1567: error: Item "generic" of + # "Union[ndarray, generic]" has no attribute "values" [union-attr] + tmp = result.iloc[:, cols].values # type: ignore[union-attr] + result.iloc[:, cols] = tmp / np.sqrt(self.count().iloc[:, cols]).values return result @Substitution(name="groupby") @@ -2127,12 +2137,20 @@ def pre_processor(vals: np.ndarray) -> Tuple[np.ndarray, Optional[Type]]: inference = None if is_integer_dtype(vals.dtype): if is_extension_array_dtype(vals.dtype): - vals = vals.to_numpy(dtype=float, na_value=np.nan) + # error: "ndarray" has no attribute "to_numpy" + vals = vals.to_numpy( # type: ignore[attr-defined] + dtype=float, na_value=np.nan + ) inference = np.int64 elif is_bool_dtype(vals.dtype) and is_extension_array_dtype(vals.dtype): - vals = vals.to_numpy(dtype=float, na_value=np.nan) + # error: "ndarray" has no attribute "to_numpy" + vals = vals.to_numpy( # type: ignore[attr-defined] + dtype=float, na_value=np.nan + ) elif is_datetime64_dtype(vals.dtype): - inference = "datetime64[ns]" + # error: Incompatible types in assignment (expression has type + # "str", variable has type "Optional[Type[int64]]") + inference = "datetime64[ns]" # type: ignore[assignment] vals = np.asarray(vals).astype(float) return vals, inference diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index b6c0240abb15f..78576d0c07804 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -3969,7 +3969,9 @@ def _get_engine_target(self) -> np.ndarray: """ Get the ndarray that we can pass to the IndexEngine 
constructor. """ - return self._values + # error: Incompatible return value type (got "Union[ExtensionArray, + # ndarray]", expected "ndarray") + return self._values # type: ignore[return-value] @doc(IndexOpsMixin.memory_usage) def memory_usage(self, deep: bool = False) -> int: diff --git a/pandas/core/strings/accessor.py b/pandas/core/strings/accessor.py index cdb889b685c0d..2cf1bc26ec82f 100644 --- a/pandas/core/strings/accessor.py +++ b/pandas/core/strings/accessor.py @@ -589,14 +589,23 @@ def cat(self, others=None, sep=None, na_rep=None, join="left"): if isinstance(self._orig, ABCIndexClass): # add dtype for case that result is all-NA - result = Index(result, dtype=object, name=self._orig.name) + + # error: Incompatible types in assignment (expression has type + # "Index", variable has type "ndarray") + result = Index( # type: ignore[assignment] + result, dtype=object, name=self._orig.name + ) else: # Series if is_categorical_dtype(self._orig.dtype): # We need to infer the new categories. dtype = None else: dtype = self._orig.dtype - result = Series(result, dtype=dtype, index=data.index, name=self._orig.name) + # error: Incompatible types in assignment (expression has type + # "Series", variable has type "ndarray") + result = Series( # type: ignore[assignment] + result, dtype=dtype, index=data.index, name=self._orig.name + ) return result _shared_docs[ diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index ebeefc2086094..29e03007ef5c8 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -383,7 +383,9 @@ def _prep_values(self, values: Optional[np.ndarray] = None) -> np.ndarray: try: values = ensure_float64(values) except (ValueError, TypeError) as err: - raise TypeError(f"cannot handle this type -> {values.dtype}") from err + # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" + tmp = values.dtype # type: ignore[union-attr] + raise TypeError(f"cannot handle this type -> {tmp}") from err # Convert inf to nan for C funcs inf = np.isinf(values) diff --git a/pandas/io/formats/format.py b/pandas/io/formats/format.py index c33440cd71190..3a725e8ab5b1c 100644 --- a/pandas/io/formats/format.py +++ b/pandas/io/formats/format.py @@ -1666,10 +1666,26 @@ def format_percentiles( raise ValueError("percentiles should all be in the interval [0,1]") percentiles = 100 * percentiles - int_idx = np.isclose(percentiles.astype(int), percentiles) + # pandas\io\formats\format.py:1669: error: Item "List[Union[int, float]]" + # of "Union[Any, List[Union[int, float]], List[Union[str, float]]]" has no + # attribute "astype" [union-attr] + + # pandas\io\formats\format.py:1669: error: Item "List[Union[str, float]]" + # of "Union[Any, List[Union[int, float]], List[Union[str, float]]]" has no + # attribute "astype" [union-attr] + int_idx = np.isclose( + percentiles.astype(int), percentiles # type: ignore[union-attr] + ) if np.all(int_idx): - out = percentiles.astype(int).astype(str) + # pandas\io\formats\format.py:1672: error: Item "List[Union[int, + # float]]" of "Union[Any, List[Union[int, float]], List[Union[str, + # float]]]" has no attribute "astype" [union-attr] + + # pandas\io\formats\format.py:1672: error: Item "List[Union[str, + # float]]" of "Union[Any, List[Union[int, float]], List[Union[str, + # float]]]" has no attribute "astype" [union-attr] + out = percentiles.astype(int).astype(str) # type: ignore[union-attr] return [i + "%" for i in out] unique_pcts = np.unique(percentiles) From 6113fdd387d79f05b8a09532b438cd4236833fa6 Mon Sep 17 
00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 20:50:10 +0100 Subject: [PATCH 19/86] wip --- pandas/_testing.py | 20 +++++++++++--- pandas/core/arrays/integer.py | 7 +++-- pandas/core/frame.py | 38 ++++++++++++++++++++++---- pandas/core/groupby/generic.py | 10 ++++--- pandas/core/groupby/groupby.py | 8 +++--- pandas/core/reshape/merge.py | 7 ++++- pandas/core/reshape/pivot.py | 4 ++- pandas/core/reshape/tile.py | 3 ++- pandas/core/window/ewm.py | 11 +++++++- pandas/core/window/rolling.py | 6 ++++- pandas/io/pytables.py | 49 ++++++++++++++++++++++++++-------- pandas/io/sql.py | 4 ++- pandas/io/stata.py | 32 +++++++++++++++++----- 13 files changed, 157 insertions(+), 42 deletions(-) diff --git a/pandas/_testing.py b/pandas/_testing.py index cf6272edc4c05..7ad1494ee3b83 100644 --- a/pandas/_testing.py +++ b/pandas/_testing.py @@ -14,7 +14,10 @@ import zipfile import numpy as np -from numpy.random import rand, randn + +# pandas\_testing.py:17: error: Module 'numpy.random' has no attribute 'rand' +# pandas\_testing.py:17: error: Module 'numpy.random' has no attribute 'randn' +from numpy.random import rand, randn # type: ignore[attr-defined] from pandas._config.localization import ( # noqa:F401 can_set_locale, @@ -102,9 +105,18 @@ + STRING_DTYPES + DATETIME64_DTYPES + TIMEDELTA64_DTYPES - + BOOL_DTYPES - + OBJECT_DTYPES - + BYTES_DTYPES + # error: Unsupported operand types for + ("List[Union[ExtensionDtype, str, + # dtype, Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]]" and "List[object]") + + BOOL_DTYPES # type: ignore[operator] + # error: Unsupported operand types for + ("List[Union[ExtensionDtype, str, + # dtype, Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]]" and "List[object]") + + OBJECT_DTYPES # type: ignore[operator] + # error: Unsupported operand types for + ("List[Union[ExtensionDtype, str, + # dtype, Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]]" and "List[object]") + + BYTES_DTYPES # type: ignore[operator] ) diff --git a/pandas/core/arrays/integer.py b/pandas/core/arrays/integer.py index 892f0854f64ef..7e93550a93138 100644 --- a/pandas/core/arrays/integer.py +++ b/pandas/core/arrays/integer.py @@ -88,8 +88,11 @@ def _get_common_dtype(self, dtypes: List[DtypeObj]) -> Optional[DtypeObj]: # dtype, ExtensionDtype]]; expected List[Union[dtype, None, type, # _SupportsDtype, str, Tuple[Any, Union[int, Sequence[int]]], # List[Any], _DtypeDict, Tuple[Any, Any]]] - [ # type: ignore[misc] - t.numpy_dtype if isinstance(t, BaseMaskedDtype) else t for t in dtypes + [ + t.numpy_dtype # type: ignore[misc] + if isinstance(t, BaseMaskedDtype) + else t + for t in dtypes ], [], ) diff --git a/pandas/core/frame.py b/pandas/core/frame.py index a8a0d86094366..267daf7d40416 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -509,7 +509,16 @@ def __init__( elif getattr(data, "name", None) is not None: mgr = init_dict({data.name: data}, index, columns, dtype=dtype) else: - mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) + # error: Argument "dtype" to "init_ndarray" has incompatible + # type "Union[ExtensionDtype, str, dtype, Type[object], None]"; + # expected "Union[dtype, ExtensionDtype, None]" + mgr = init_ndarray( + data, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + copy=copy, + ) # For data is list-like, or Iterable (will consume into list) elif isinstance(data, abc.Iterable) and not isinstance(data, (str, bytes)): @@ -521,7 +530,12 @@ def __init__( 
if is_list_like(data[0]) and getattr(data[0], "ndim", 1) == 1: if is_named_tuple(data[0]) and columns is None: columns = data[0]._fields - arrays, columns = to_arrays(data, columns, dtype=dtype) + # error: Argument "dtype" to "to_arrays" has incompatible + # type "Union[ExtensionDtype, str, dtype, Type[object], + # None]"; expected "Union[dtype, ExtensionDtype, None]" + arrays, columns = to_arrays( + data, columns, dtype=dtype # type: ignore[arg-type] + ) columns = ensure_index(columns) # set the index @@ -533,14 +547,28 @@ def __init__( else: index = ibase.default_index(len(data)) - mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype) + # error: Argument "dtype" to "arrays_to_mgr" has + # incompatible type "Union[ExtensionDtype, str, dtype, + # Type[object], None]"; expected "Union[dtype, + # ExtensionDtype, None]" + mgr = arrays_to_mgr( + arrays, + columns, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + ) else: # error: Argument "dtype" to "init_ndarray" has # incompatible type "Union[ExtensionDtype, str, dtype, # Type[object], None]"; expected "Union[dtype, # ExtensionDtype, None]" - mgr = init_ndarray( # type: ignore[arg-type] - data, index, columns, dtype=dtype, copy=copy + mgr = init_ndarray( + data, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + copy=copy, ) else: # error: Argument "dtype" to "init_dict" has incompatible type diff --git a/pandas/core/groupby/generic.py b/pandas/core/groupby/generic.py index 22da85a5f1557..db36ecc09189c 100644 --- a/pandas/core/groupby/generic.py +++ b/pandas/core/groupby/generic.py @@ -322,8 +322,8 @@ def _aggregate_multiple_funcs(self, arg): # Argument 1 to "_wrap_aggregated_output" of "SeriesGroupBy" has # incompatible type "Dict[OutputKey, Union[DataFrame, Series]]"; # expected "Mapping[OutputKey, Union[Series, ndarray]]" - output = self._wrap_aggregated_output( # type: ignore[arg-type] - results, index=None + output = self._wrap_aggregated_output( + results, index=None # type: ignore[arg-type] ) return self.obj._constructor_expanddim(output, columns=columns) @@ -745,7 +745,11 @@ def value_counts( # multi-index components codes = self.grouper.reconstructed_codes codes = [rep(level_codes) for level_codes in codes] + [llab(lab, inc)] - levels = [ping.group_index for ping in self.grouper.groupings] + [lev] + # error: List item 0 has incompatible type "Union[ndarray, Any]"; + # expected "Index" + levels = [ # type: ignore[list-item] + ping.group_index for ping in self.grouper.groupings + ] + [lev] names = self.grouper.names + [self._selection_name] if dropna: diff --git a/pandas/core/groupby/groupby.py b/pandas/core/groupby/groupby.py index 99605627a2c71..322618750d706 100644 --- a/pandas/core/groupby/groupby.py +++ b/pandas/core/groupby/groupby.py @@ -1055,8 +1055,8 @@ def _cython_agg_general( # error: Argument 1 to "_wrap_aggregated_output" of "BaseGroupBy" has # incompatible type "Dict[OutputKey, Union[ndarray, DatetimeArray]]"; # expected "Mapping[OutputKey, ndarray]" - return self._wrap_aggregated_output( # type: ignore[arg-type] - output, index=self.grouper.result_index + return self._wrap_aggregated_output( + output, index=self.grouper.result_index # type: ignore[arg-type] ) def _transform_with_numba(self, data, func, *args, engine_kwargs=None, **kwargs): @@ -1574,8 +1574,8 @@ def sem(self, ddof: int = 1): # pandas\core\groupby\groupby.py:1567: error: Item "generic" of # "Union[ndarray, generic]" has no attribute "values" [union-attr] - tmp = result.iloc[:, cols].values # type: ignore[union-attr] - 
result.iloc[:, cols] = tmp / np.sqrt(self.count().iloc[:, cols]).values + tmp = np.sqrt(self.count().iloc[:, cols]).values # type: ignore[union-attr] + result.iloc[:, cols] = result.iloc[:, cols].values / tmp return result @Substitution(name="groupby") diff --git a/pandas/core/reshape/merge.py b/pandas/core/reshape/merge.py index 493ba87565220..a99a31834617a 100644 --- a/pandas/core/reshape/merge.py +++ b/pandas/core/reshape/merge.py @@ -1943,7 +1943,12 @@ def _factorize_keys( elif is_extension_array_dtype(lk.dtype) and is_dtype_equal(lk.dtype, rk.dtype): lk, _ = lk._values_for_factorize() - rk, _ = rk._values_for_factorize() + + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "ExtensionArray") + + # error: "ndarray" has no attribute "_values_for_factorize" + rk, _ = rk._values_for_factorize() # type: ignore[attr-defined,assignment] if is_integer_dtype(lk) and is_integer_dtype(rk): # GH#23917 TODO: needs tests for case where lk is integer-dtype diff --git a/pandas/core/reshape/pivot.py b/pandas/core/reshape/pivot.py index 842a42f80e1b7..6e439ce4c6efd 100644 --- a/pandas/core/reshape/pivot.py +++ b/pandas/core/reshape/pivot.py @@ -451,7 +451,9 @@ def pivot( cols = com.convert_to_list_like(index) else: cols = [] - cols.extend(columns) + # error: Item "ExtensionArray" of "Union[List[Any], ExtensionArray]" + # has no attribute "extend" + cols.extend(columns) # type: ignore[union-attr] append = index is None indexed = data.set_index(cols, append=append) diff --git a/pandas/core/reshape/tile.py b/pandas/core/reshape/tile.py index 4c5347bd16e8b..b62ae2279b1a3 100644 --- a/pandas/core/reshape/tile.py +++ b/pandas/core/reshape/tile.py @@ -604,7 +604,8 @@ def _round_frac(x, precision: int): if not np.isfinite(x) or x == 0: return x else: - frac, whole = np.modf(x) + # error: 'numpy.generic' object is not iterable + frac, whole = np.modf(x) # type: ignore[misc] if whole == 0: digits = -int(np.floor(np.log10(abs(frac)))) - 1 + precision else: diff --git a/pandas/core/window/ewm.py b/pandas/core/window/ewm.py index 622a338bda2e5..2e556fbca7b44 100644 --- a/pandas/core/window/ewm.py +++ b/pandas/core/window/ewm.py @@ -493,6 +493,15 @@ def _cov(x, y): corr = cov / zsqrt(x_var * y_var) return X._wrap_result(corr) + # pandas\core\window\ewm.py:497: error: Item "ndarray" of + # "Union[ndarray, FrameOrSeries, None]" has no attribute + # "_selected_obj" [union-attr] + + # pandas\core\window\ewm.py:497: error: Item "None" of "Union[ndarray, + # FrameOrSeries, None]" has no attribute "_selected_obj" [union-attr] return flex_binary_moment( - self._selected_obj, other._selected_obj, _get_corr, pairwise=bool(pairwise) + self._selected_obj, # type: ignore[union-attr] + other._selected_obj, + _get_corr, + pairwise=bool(pairwise), ) diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index 29e03007ef5c8..8e16b1a07ffa9 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -388,7 +388,11 @@ def _prep_values(self, values: Optional[np.ndarray] = None) -> np.ndarray: raise TypeError(f"cannot handle this type -> {tmp}") from err # Convert inf to nan for C funcs - inf = np.isinf(values) + + # error: Argument 1 to "__call__" of "ufunc" has incompatible type + # "Optional[ndarray]"; expected "Union[bool, int, float, complex, + # _SupportsArray, Sequence[Any]]" + inf = np.isinf(values) # type: ignore[arg-type] if inf.any(): values = np.where(inf, np.nan, values) diff --git a/pandas/io/pytables.py b/pandas/io/pytables.py index 
a3d6975c00a95..4317d67126247 100644 --- a/pandas/io/pytables.py +++ b/pandas/io/pytables.py @@ -2059,7 +2059,9 @@ def convert(self, values: np.ndarray, nan_rep, encoding: str, errors: str): kwargs["freq"] = None new_pd_index = Index(values, **kwargs) - new_pd_index = _set_tz(new_pd_index, self.tz) + # error: Incompatible types in assignment (expression has type + # "Union[ndarray, DatetimeIndex]", variable has type "Index") + new_pd_index = _set_tz(new_pd_index, self.tz) # type: ignore[assignment] return new_pd_index, new_pd_index def take_data(self): @@ -2223,7 +2225,9 @@ def convert(self, values: np.ndarray, nan_rep, encoding: str, errors: str): """ assert isinstance(values, np.ndarray), type(values) - values = Int64Index(np.arange(len(values))) + # error: Incompatible types in assignment (expression has type + # "Int64Index", variable has type "ndarray") + values = Int64Index(np.arange(len(values))) # type: ignore[assignment] return values, values def set_attr(self): @@ -3055,10 +3059,19 @@ def write_array(self, key: str, value: ArrayLike, items: Optional[Index] = None) elif is_datetime64tz_dtype(value.dtype): # store as UTC # with a zone - self._handle.create_array(self.group, key, value.asi8) + + # error: "ndarray" has no attribute "asi8" + self._handle.create_array( + self.group, key, value.asi8 # type: ignore[attr-defined] + ) node = getattr(self.group, key) - node._v_attrs.tz = _get_tz(value.tz) + # pandas\io\pytables.py:3061: error: "ExtensionArray" has no + # attribute "tz" [attr-defined] + + # pandas\io\pytables.py:3061: error: "ndarray" has no attribute + # "tz" [attr-defined] + node._v_attrs.tz = _get_tz(value.tz) # type: ignore[attr-defined] node._v_attrs.value_type = "datetime64" elif is_timedelta64_dtype(value.dtype): self._handle.create_array(self.group, key, value.view("i8")) @@ -3338,7 +3351,10 @@ def validate_multiindex(self, obj): @property def nrows_expected(self) -> int: """ based on our axes, compute the expected nrows """ - return np.prod([i.cvalues.shape[0] for i in self.index_axes]) + # error: Incompatible return value type (got "number", expected "int") + return np.prod( # type: ignore[return-value] + [i.cvalues.shape[0] for i in self.index_axes] + ) @property def is_exists(self) -> bool: @@ -3424,8 +3440,12 @@ def write_metadata(self, key: str, values: np.ndarray): key : str values : ndarray """ - values = Series(values) - self.parent.put( + # error: Incompatible types in assignment (expression has type + # "Series", variable has type "ndarray") + values = Series(values) # type: ignore[assignment] + # error: Value of type variable "FrameOrSeries" of "put" of "HDFStore" + # cannot be "ndarray" + self.parent.put( # type: ignore[type-var] self._get_metadata_path(key), values, format="table", @@ -4506,7 +4526,8 @@ def read( index_ = cols cols_ = Index(index, name=getattr(index, "name", None)) else: - values = cvalues.T + # error: "ExtensionArray" has no attribute "T" + values = cvalues.T # type: ignore[attr-defined] index_ = Index(index, name=getattr(index, "name", None)) cols_ = cols @@ -4768,14 +4789,18 @@ def _set_tz( elif coerce: values = np.asarray(values, dtype="M8[ns]") - return values + # error: Incompatible return value type (got "Union[ndarray, Index]", + # expected "Union[ndarray, DatetimeIndex]") + return values # type: ignore[return-value] def _convert_index(name: str, index: Index, encoding: str, errors: str) -> IndexCol: assert isinstance(name, str) index_name = index.name - converted, dtype_name = _get_data_and_dtype_name(index) + # error: Value of 
type variable "ArrayLike" of "_get_data_and_dtype_name" + # cannot be "Index" + converted, dtype_name = _get_data_and_dtype_name(index) # type: ignore[type-var] kind = _dtype_to_kind(dtype_name) atom = DataIndexableCol._get_atom(converted) @@ -5081,7 +5106,9 @@ def _get_data_and_dtype_name(data: ArrayLike): Convert the passed data into a storable form and a dtype string. """ if isinstance(data, Categorical): - data = data.codes + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "ExtensionArray") + data = data.codes # type: ignore[assignment] # For datetime64tz we need to drop the TZ in tests TODO: why? dtype_name = data.dtype.name.split("[")[0] diff --git a/pandas/io/sql.py b/pandas/io/sql.py index 51888e5021d80..222f6e7857db7 100644 --- a/pandas/io/sql.py +++ b/pandas/io/sql.py @@ -790,7 +790,9 @@ def insert_data(self): mask = isna(d) d[mask] = None - data_list[i] = d + # error: No overload variant of "__setitem__" of "list" matches + # argument types "int", "ndarray" + data_list[i] = d # type: ignore[call-overload] return column_names, data_list diff --git a/pandas/io/stata.py b/pandas/io/stata.py index d36bd42e7da8d..8972dd60d043b 100644 --- a/pandas/io/stata.py +++ b/pandas/io/stata.py @@ -552,7 +552,10 @@ def _cast_to_stata_types(data: DataFrame) -> DataFrame: dtype = c_data[1] else: dtype = c_data[2] - if c_data[2] == np.float64: # Warn if necessary + # error: Non-overlapping equality check (left operand type: + # "Type[signedinteger]", right operand type: "Type[float64]") + if c_data[2] == np.float64: # type: ignore[comparison-overlap] + # Warn if necessary if data[col].max() >= 2 ** 53: ws = precision_loss_doc.format("uint64", "float64") @@ -644,7 +647,10 @@ def __init__(self, catarray: Series, encoding: str = "latin-1"): ) # Ensure int32 - self.off = np.array(self.off, dtype=np.int32) + + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "List[int]") + self.off = np.array(self.off, dtype=np.int32) # type: ignore[assignment] self.val = np.array(self.val, dtype=np.int32) # Total length @@ -1208,7 +1214,10 @@ def g(typ: int) -> Union[str, np.dtype]: dtyplist = [g(x) for x in raw_typlist] - return typlist, dtyplist + # error: Incompatible return value type (got "Tuple[List[Union[int, + # str]], List[Union[str, dtype]]]", expected "Tuple[List[Union[int, + # str]], List[Union[int, dtype]]]") + return typlist, dtyplist # type: ignore[return-value] def _get_varlist(self) -> List[str]: # 33 in order formats, 129 in formats 118 and 119 @@ -1392,9 +1401,12 @@ def _setup_dtype(self) -> np.dtype: dtypes.append(("s" + str(i), self.byteorder + self.NUMPY_TYPE_MAP[typ])) else: dtypes.append(("s" + str(i), "S" + str(typ))) - self._dtype = np.dtype(dtypes) + # error: Incompatible types in assignment (expression has type "dtype", + # variable has type "None") + self._dtype = np.dtype(dtypes) # type: ignore[assignment] - return self._dtype + # error: Incompatible return value type (got "None", expected "dtype") + return self._dtype # type: ignore[return-value] def _calcsize(self, fmt: Union[int, str]) -> int: if isinstance(fmt, int): @@ -1803,7 +1815,9 @@ def _do_convert_categoricals( warnings.warn( categorical_conversion_warning, CategoricalConversionWarning ) - initial_categories = None + # error: Incompatible types in assignment (expression has + # type "None", variable has type "ndarray") + initial_categories = None # type: ignore[assignment] cat_data = Categorical( column, categories=initial_categories, 
ordered=order_categoricals ) @@ -2009,7 +2023,11 @@ def _convert_datetime_to_stata_type(fmt: str) -> np.dtype: "ty", "%ty", ]: - return np.float64 # Stata expects doubles for SIFs + # Stata expects doubles for SIFs + + # error: Incompatible return value type (got "Type[float64]", expected + # "dtype") + return np.float64 # type: ignore[return-value] else: raise NotImplementedError(f"Format {fmt} not implemented") From fe770ba5d0b065d53bf2d5c117783a2348590d27 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 21:24:48 +0100 Subject: [PATCH 20/86] wip --- pandas/core/algorithms.py | 4 ++- pandas/core/arrays/base.py | 10 +++++--- pandas/core/arrays/datetimelike.py | 4 ++- pandas/core/arrays/datetimes.py | 5 +++- pandas/core/arrays/period.py | 4 ++- pandas/core/arrays/timedeltas.py | 6 ++++- pandas/core/construction.py | 4 +-- pandas/core/frame.py | 12 ++++----- pandas/core/generic.py | 4 ++- pandas/core/nanops.py | 41 ++++++++++++++++++++++-------- pandas/core/reshape/merge.py | 32 +++++++++++++++++++---- pandas/core/series.py | 7 ++++- pandas/core/window/ewm.py | 4 +-- pandas/core/window/rolling.py | 2 +- 14 files changed, 102 insertions(+), 37 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index fcc5bd9873bd5..ec05a31a99748 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -1168,7 +1168,9 @@ def _get_score(at): else: q = np.asarray(q, np.float64) result = [_get_score(x) for x in q] - result = np.array(result, dtype=np.float64) + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "List[Any]") + result = np.array(result, dtype=np.float64) # type: ignore[assignment] return result diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py index 78929a1e7f5c2..52af4dc5b5853 100644 --- a/pandas/core/arrays/base.py +++ b/pandas/core/arrays/base.py @@ -416,7 +416,8 @@ def size(self) -> int: """ The number of elements in the array. """ - return np.prod(self.shape) + # error: Incompatible return value type (got "number", expected "int") + return np.prod(self.shape) # type: ignore[return-value] @property def ndim(self) -> int: @@ -782,7 +783,8 @@ def equals(self, other: object) -> bool: if isinstance(equal_values, ExtensionArray): # boolean array with NA -> fill with False equal_values = equal_values.fillna(False) - equal_na = self.isna() & other.isna() + # error: Unsupported left operand type for & ("ExtensionArray") + equal_na = self.isna() & other.isna() # type: ignore[operator] return bool((equal_values | equal_na).all()) def _values_for_factorize(self) -> Tuple[np.ndarray, Any]: @@ -855,7 +857,9 @@ def factorize(self, na_sentinel: int = -1) -> Tuple[np.ndarray, "ExtensionArray" ) uniques = self._from_factorized(uniques, self) - return codes, uniques + # error: Incompatible return value type (got "Tuple[ndarray, ndarray]", + # expected "Tuple[ndarray, ExtensionArray]") + return codes, uniques # type: ignore[return-value] _extension_array_shared_docs[ "repeat" diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py index 33a85239ed3be..56a3aff4fa336 100644 --- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -1256,7 +1256,9 @@ def _addsub_object_array(self, other: np.ndarray, op): res_values = op(self.astype("O"), np.asarray(other)) result = array(res_values.ravel()) - result = extract_array(result, extract_numpy=True).reshape(self.shape) + # error: "ExtensionArray" has no attribute "reshape"; maybe "shape"? 
+ tmp = extract_array(result, extract_numpy=True) + result = tmp.reshape(self.shape) # type: ignore[attr-defined] return result def _time_shift(self, periods, freq=None): diff --git a/pandas/core/arrays/datetimes.py b/pandas/core/arrays/datetimes.py index db73c84b39cf9..e76770bfea056 100644 --- a/pandas/core/arrays/datetimes.py +++ b/pandas/core/arrays/datetimes.py @@ -485,7 +485,10 @@ def _box_func(self, x) -> Union[Timestamp, NaTType]: return Timestamp(x, freq=self.freq, tz=self.tz) @property - def dtype(self) -> Union[np.dtype, DatetimeTZDtype]: + # error: Return type "Union[dtype, DatetimeTZDtype]" of "dtype" + # incompatible with return type "ExtensionDtype" in supertype + # "ExtensionArray" + def dtype(self) -> Union[np.dtype, DatetimeTZDtype]: # type: ignore[override] """ The dtype for the DatetimeArray. diff --git a/pandas/core/arrays/period.py b/pandas/core/arrays/period.py index 15f2842e39875..77fd3237694c4 100644 --- a/pandas/core/arrays/period.py +++ b/pandas/core/arrays/period.py @@ -257,7 +257,9 @@ def _generate_range(cls, start, end, periods, freq, fields): # DatetimeLike Interface @classmethod - def _rebox_native(cls, value: int) -> np.int64: + # error: Return type "int64" of "_rebox_native" incompatible with return + # type "Union[int, datetime64, timedelta64]" in supertype "AttributesMixin" + def _rebox_native(cls, value: int) -> np.int64: # type: ignore[override] return np.int64(value) def _unbox_scalar( diff --git a/pandas/core/arrays/timedeltas.py b/pandas/core/arrays/timedeltas.py index 19107c4b1b8f1..5c3f02d1f8a0e 100644 --- a/pandas/core/arrays/timedeltas.py +++ b/pandas/core/arrays/timedeltas.py @@ -668,7 +668,11 @@ def __floordiv__(self, other): return result elif is_object_dtype(other.dtype): - result = [self[n] // other[n] for n in range(len(self))] + # error: Incompatible types in assignment (expression has type + # "List[Any]", variable has type "ndarray") + result = [ # type: ignore[assignment] + self[n] // other[n] for n in range(len(self)) + ] result = np.array(result) if lib.infer_dtype(result, skipna=False) == "timedelta": result, _ = sequence_to_td64ns(result) diff --git a/pandas/core/construction.py b/pandas/core/construction.py index 705bdd81e8a73..cd871f1cc76b6 100644 --- a/pandas/core/construction.py +++ b/pandas/core/construction.py @@ -533,8 +533,8 @@ def sanitize_array( # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, # int], Tuple[Any, Union[int, Sequence[int]]], List[Any], # _DtypeDict, Tuple[Any, Any]]" - data = np.array( # type:ignore[arg-type] - data, dtype=dtype, copy=False + data = np.array( + data, dtype=dtype, copy=False # type:ignore[arg-type] ) subarr = np.array(data, dtype=object, copy=copy) diff --git a/pandas/core/frame.py b/pandas/core/frame.py index 267daf7d40416..c467c0a03e392 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -574,8 +574,8 @@ def __init__( # error: Argument "dtype" to "init_dict" has incompatible type # "Union[ExtensionDtype, str, dtype, Type[object], None]"; # expected "Union[dtype, ExtensionDtype, None]" - mgr = init_dict( # type: ignore[arg-type] - {}, index, columns, dtype=dtype + mgr = init_dict( + {}, index, columns, dtype=dtype # type: ignore[arg-type] ) # For data is scalar else: @@ -592,8 +592,8 @@ def __init__( # error: Argument 3 to "construct_1d_arraylike_from_scalar" # has incompatible type "Union[ExtensionDtype, str, dtype, # Type[object]]"; expected "Union[dtype, ExtensionDtype]" - construct_1d_arraylike_from_scalar( # type: ignore[arg-type] - data, len(index), dtype + 
construct_1d_arraylike_from_scalar( + data, len(index), dtype # type: ignore[arg-type] ) for _ in range(len(columns)) ] @@ -606,8 +606,8 @@ def __init__( # expected "Union[dtype, None, type, _SupportsDtype, str, # Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], # List[Any], _DtypeDict, Tuple[Any, Any]]" - arr = np.array( # type: ignore[arg-type] - data, dtype=dtype, copy=copy + arr = np.array( + data, dtype=dtype, copy=copy # type: ignore[arg-type] ) except (ValueError, TypeError) as err: exc = TypeError( diff --git a/pandas/core/generic.py b/pandas/core/generic.py index d26a8b1663336..4480cddaca657 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -10223,7 +10223,9 @@ def describe_1d(data): # when some numerics are found, keep only numerics default_include = [np.number] if datetime_is_numeric: - default_include.append("datetime") + # error: Argument 1 to "append" of "list" has incompatible type + # "str"; expected "Type[number]" + default_include.append("datetime") # type: ignore[arg-type] data = self.select_dtypes(include=default_include) if len(data.columns) == 0: data = self diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 75b30589ad517..2eb9a932240d9 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -870,8 +870,8 @@ def nansem( # error: Argument 1 to "_get_counts_nanvar" has incompatible type # "Tuple[int, ...]"; expected "Tuple[int]" - count, _ = _get_counts_nanvar( # type: ignore[arg-type] - values.shape, mask, axis, ddof, values.dtype + count, _ = _get_counts_nanvar( + values.shape, mask, axis, ddof, values.dtype # type: ignore[arg-type] ) var = nanvar(values, axis, skipna, ddof=ddof) @@ -1033,7 +1033,9 @@ def nanskew( >>> nanops.nanskew(s) 1.7320508075688787 """ - values = extract_array(values, extract_numpy=True) + # error: Incompatible types in assignment (expression has type + # "ExtensionArray", variable has type "ndarray") + values = extract_array(values, extract_numpy=True) # type: ignore[assignment] mask = _maybe_get_mask(values, skipna, mask) if not is_float_dtype(values.dtype): values = values.astype("f8") @@ -1117,7 +1119,9 @@ def nankurt( >>> nanops.nankurt(s) -1.2892561983471076 """ - values = extract_array(values, extract_numpy=True) + # error: Incompatible types in assignment (expression has type + # "ExtensionArray", variable has type "ndarray") + values = extract_array(values, extract_numpy=True) # type: ignore[assignment] mask = _maybe_get_mask(values, skipna, mask) if not is_float_dtype(values.dtype): values = values.astype("f8") @@ -1224,10 +1228,14 @@ def _maybe_arg_null_out( if axis is None or not getattr(result, "ndim", False): if skipna: if mask.all(): - result = -1 + # error: Incompatible types in assignment (expression has type + # "int", variable has type "ndarray") + result = -1 # type: ignore[assignment] else: if mask.any(): - result = -1 + # error: Incompatible types in assignment (expression has type + # "int", variable has type "ndarray") + result = -1 # type: ignore[assignment] else: if skipna: na_mask = mask.all(axis) @@ -1268,7 +1276,9 @@ def _get_counts( n = mask.size - mask.sum() else: n = np.prod(values_shape) - return dtype.type(n) + # error: Incompatible return value type (got "Union[Any, generic]", + # expected "Union[int, float, ndarray]") + return dtype.type(n) # type: ignore[return-value] if mask is not None: count = mask.shape[axis] - mask.sum(axis) @@ -1276,11 +1286,17 @@ def _get_counts( count = values_shape[axis] if is_scalar(count): - return dtype.type(count) + # error: 
Incompatible return value type (got "Union[Any, generic]", + # expected "Union[int, float, ndarray]") + return dtype.type(count) # type: ignore[return-value] try: return count.astype(dtype) except AttributeError: - return np.array(count, dtype=dtype) + # error: Argument "dtype" to "array" has incompatible type + # "Union[ExtensionDtype, dtype]"; expected "Union[dtype, None, type, + # _SupportsDtype, str, Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, Any]]" + return np.array(count, dtype=dtype) # type: ignore[arg-type] def _maybe_null_out( @@ -1310,9 +1326,12 @@ def _maybe_null_out( result[null_mask] = None elif result is not NaT: if check_below_min_count(shape, mask, min_count): - result = np.nan + # error: Incompatible types in assignment (expression has type + # "float", variable has type "ndarray") + result = np.nan # type: ignore[assignment] - return result + # error: Incompatible return value type (got "ndarray", expected "float") + return result # type: ignore[return-value] def check_below_min_count( diff --git a/pandas/core/reshape/merge.py b/pandas/core/reshape/merge.py index a99a31834617a..adc69339fb293 100644 --- a/pandas/core/reshape/merge.py +++ b/pandas/core/reshape/merge.py @@ -1921,14 +1921,33 @@ def _factorize_keys( (array([0, 1, 2]), array([0, 1]), 3) """ # Some pre-processing for non-ndarray lk / rk - lk = extract_array(lk, extract_numpy=True) - rk = extract_array(rk, extract_numpy=True) + + # error: Incompatible types in assignment (expression has type + # "ExtensionArray", variable has type "ndarray") + lk = extract_array(lk, extract_numpy=True) # type: ignore[assignment] + # error: Incompatible types in assignment (expression has type + # "ExtensionArray", variable has type "ndarray") + rk = extract_array(rk, extract_numpy=True) # type: ignore[assignment] if is_datetime64tz_dtype(lk.dtype) and is_datetime64tz_dtype(rk.dtype): # Extract the ndarray (UTC-localized) values # Note: we dont need the dtypes to match, as these can still be compared - lk, _ = lk._values_for_factorize() - rk, _ = rk._values_for_factorize() + + # pandas\core\reshape\merge.py:1930: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type + # "ExtensionArray") [assignment] + + # pandas\core\reshape\merge.py:1930: error: "ndarray" has no attribute + # "_values_for_factorize" [attr-defined] + lk, _ = lk._values_for_factorize() # type: ignore[assignment, attr-defined] + + # pandas\core\reshape\merge.py:1931: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type + # "ExtensionArray") [assignment] + + # pandas\core\reshape\merge.py:1931: error: "ndarray" has no attribute + # "_values_for_factorize" [attr-defined] + rk, _ = rk._values_for_factorize() # type: ignore[assignment, attr-defined] elif ( is_categorical_dtype(lk) and is_categorical_dtype(rk) and is_dtype_equal(lk, rk) @@ -1936,7 +1955,10 @@ def _factorize_keys( assert isinstance(lk, Categorical) assert isinstance(rk, Categorical) # Cast rk to encoding so we can compare codes with lk - rk = lk._validate_listlike(rk) + + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "ExtensionArray") + rk = lk._validate_listlike(rk) # type: ignore[assignment] lk = ensure_int64(lk.codes) rk = ensure_int64(rk) diff --git a/pandas/core/series.py b/pandas/core/series.py index 2b972d33d7cdd..905cfeb91cec9 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -696,7 +696,12 @@ def 
__array_ufunc__( return result # Determine if we should defer. - no_defer = (np.ndarray.__array_ufunc__, cls.__array_ufunc__) + + # error: "Type[ndarray]" has no attribute "__array_ufunc__" + no_defer = ( + np.ndarray.__array_ufunc__, # type: ignore[attr-defined] + cls.__array_ufunc__, + ) for item in inputs: higher_priority = ( diff --git a/pandas/core/window/ewm.py b/pandas/core/window/ewm.py index 2e556fbca7b44..a054fd71f4b1f 100644 --- a/pandas/core/window/ewm.py +++ b/pandas/core/window/ewm.py @@ -500,8 +500,8 @@ def _cov(x, y): # pandas\core\window\ewm.py:497: error: Item "None" of "Union[ndarray, # FrameOrSeries, None]" has no attribute "_selected_obj" [union-attr] return flex_binary_moment( - self._selected_obj, # type: ignore[union-attr] - other._selected_obj, + self._selected_obj, + other._selected_obj, # type: ignore[union-attr] _get_corr, pairwise=bool(pairwise), ) diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index 8e16b1a07ffa9..481b2f788e077 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -367,7 +367,7 @@ def _prep_values(self, values: Optional[np.ndarray] = None) -> np.ndarray: # make sure the data is coerced to float64 # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" - if is_float_dtype(values.dtype): # type[union-attr] + if is_float_dtype(values.dtype): # type: ignore[union-attr] values = ensure_float64(values) # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" elif is_integer_dtype(values.dtype): # type: ignore[union-attr] From 6387978943479218d625dfa85b80575b791e0286 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 3 Oct 2020 22:16:40 +0100 Subject: [PATCH 21/86] mypy green locally --- pandas/core/algorithms.py | 18 +++++++++++++++-- pandas/core/arrays/datetimes.py | 3 ++- pandas/core/arrays/period.py | 11 ++++++++--- pandas/core/common.py | 6 ++++-- pandas/core/construction.py | 14 +++++++++++++- pandas/core/frame.py | 34 +++++++++++++++++++++++++++------ pandas/core/groupby/generic.py | 16 ++++++++++++---- pandas/core/groupby/grouper.py | 4 +++- pandas/core/nanops.py | 7 ++++++- pandas/core/reshape/merge.py | 14 +++++++++++--- pandas/core/reshape/reshape.py | 4 +++- pandas/core/series.py | 5 ++++- pandas/io/stata.py | 4 +++- 13 files changed, 113 insertions(+), 27 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index ec05a31a99748..71d352cb6b48c 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -487,7 +487,17 @@ def isin(comps: AnyArrayLike, values: AnyArrayLike) -> np.ndarray: # pandas\core\algorithms.py:463: error: Incompatible types in assignment # (expression has type "ndarray", variable has type "Series") [assignment] comps, dtype = _ensure_data(comps) # type: ignore[assignment] - values, _ = _ensure_data(values, dtype=dtype) + + # pandas\core\algorithms.py:490: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "ExtensionArray") + # [assignment] + + # pandas\core\algorithms.py:490: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "Index") [assignment] + + # pandas\core\algorithms.py:490: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "Series") [assignment] + values, _ = _ensure_data(values, dtype=dtype) # type: ignore[assignment] # faster for larger cases to use np.in1d f = htable.ismember_object @@ -497,7 +507,11 @@ def isin(comps: AnyArrayLike, values: AnyArrayLike) 
-> np.ndarray: if len(comps) > 1_000_000 and not is_object_dtype(comps): # If the the values include nan we need to check for nan explicitly # since np.nan it not equal to np.nan - if np.isnan(values).any(): + + # error: Argument 1 to "__call__" of "ufunc" has incompatible type + # "ExtensionArray"; expected "Union[bool, int, float, complex, + # _SupportsArray, Sequence[Any]]" + if np.isnan(values).any(): # type: ignore[arg-type] f = lambda c, v: np.logical_or(np.in1d(c, v), np.isnan(c)) else: f = np.in1d diff --git a/pandas/core/arrays/datetimes.py b/pandas/core/arrays/datetimes.py index e76770bfea056..277556e750407 100644 --- a/pandas/core/arrays/datetimes.py +++ b/pandas/core/arrays/datetimes.py @@ -454,7 +454,8 @@ def _generate_range( @classmethod def _rebox_native(cls, value: int) -> np.datetime64: - return np.int64(value).view("M8[ns]") + # error: Incompatible return value type (got "int64", expected "datetime64") + return np.int64(value).view("M8[ns]") # type: ignore[return-value] def _unbox_scalar(self, value, setitem: bool = False): if not isinstance(value, self._scalar_type) and value is not NaT: diff --git a/pandas/core/arrays/period.py b/pandas/core/arrays/period.py index 77fd3237694c4..2a1e176665971 100644 --- a/pandas/core/arrays/period.py +++ b/pandas/core/arrays/period.py @@ -1082,9 +1082,14 @@ def _make_field_arrays(*fields): length = len(x) arrays = [ - np.asarray(x) - if isinstance(x, (np.ndarray, list, ABCSeries)) - else np.repeat(x, length) + np.asarray(x) if isinstance(x, (np.ndarray, list, ABCSeries)) + # error: Argument 2 to "repeat" has incompatible type "Optional[int]"; + # expected "Union[Union[Union[int, integer], Union[bool, bool_]], + # ndarray, Sequence[Union[Union[int, integer], Union[bool, bool_]]], + # Sequence[Union[bool, int, float, complex, _SupportsArray, + # Sequence[Any]]], Sequence[Union[bool, int, float, complex, + # _SupportsArray, Sequence[Any]]]]" + else np.repeat(x, length) # type: ignore[arg-type] for x in fields ] diff --git a/pandas/core/common.py b/pandas/core/common.py index ac73141025147..029b90c436bfc 100644 --- a/pandas/core/common.py +++ b/pandas/core/common.py @@ -426,8 +426,10 @@ def random_state(state=None): or is_array_like(state) or (not np_version_under1p18 and isinstance(state, np.random.BitGenerator)) ): - return np.random.RandomState(state) - elif isinstance(state, np.random.RandomState): + # error: Module has no attribute "RandomState"; maybe "__RandomState_ctor"? + return np.random.RandomState(state) # type: ignore[attr-defined] + # error: Module has no attribute "RandomState"; maybe "__RandomState_ctor"? 
+ elif isinstance(state, np.random.RandomState): # type: ignore[attr-defined] return state elif state is None: return np.random diff --git a/pandas/core/construction.py b/pandas/core/construction.py index cd871f1cc76b6..c63dbc8764495 100644 --- a/pandas/core/construction.py +++ b/pandas/core/construction.py @@ -292,7 +292,19 @@ def array( # error: Value of type variable "AnyArrayLike" of "extract_array" cannot be # "Union[Sequence[object], Index]" - data = extract_array(data, extract_numpy=True) # type: ignore[type-var] + + # pandas\core\construction.py:295: error: Incompatible types in assignment + # (expression has type "ExtensionArray", variable has type + # "Union[Sequence[object], Index]") [assignment] + + # pandas\core\construction.py:295: error: Incompatible types in assignment + # (expression has type "ExtensionArray", variable has type + # "Union[Sequence[object], Series]") [assignment] + + # pandas\core\construction.py:295: error: Incompatible types in assignment + # (expression has type "ExtensionArray", variable has type + # "Union[Sequence[object], ndarray]") [assignment] + data = extract_array(data, extract_numpy=True) # type: ignore[type-var,assignment] # this returns None for not-found dtypes. if isinstance(dtype, str): diff --git a/pandas/core/frame.py b/pandas/core/frame.py index c467c0a03e392..c0709c04c9a12 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -507,7 +507,20 @@ def __init__( data, index, columns, dtype=dtype # type: ignore[arg-type] ) elif getattr(data, "name", None) is not None: - mgr = init_dict({data.name: data}, index, columns, dtype=dtype) + # pandas\core\frame.py:510: error: Item "ndarray" of + # "Union[ndarray, Series, Index]" has no attribute "name" + # [union-attr] + + # pandas\core\frame.py:510: error: Argument "dtype" to + # "init_dict" has incompatible type "Union[ExtensionDtype, str, + # dtype, Type[object], None]"; expected "Union[dtype, + # ExtensionDtype, None]" [arg-type] + mgr = init_dict( + {data.name: data}, # type: ignore[union-attr] + index, + columns, + dtype=dtype, # type: ignore[arg-type] + ) else: # error: Argument "dtype" to "init_ndarray" has incompatible # type "Union[ExtensionDtype, str, dtype, Type[object], None]"; @@ -536,7 +549,9 @@ def __init__( arrays, columns = to_arrays( data, columns, dtype=dtype # type: ignore[arg-type] ) - columns = ensure_index(columns) + # error: Value of type variable "AnyArrayLike" of + # "ensure_index" cannot be "Optional[Collection[Any]]" + columns = ensure_index(columns) # type: ignore[type-var] # set the index if index is None: @@ -622,7 +637,13 @@ def __init__( # error: Incompatible types in assignment (expression has type # "ndarray", variable has type "List[ExtensionArray]") values = cast_scalar_to_array( # type: ignore[assignment] - (len(index), len(columns)), data, dtype=dtype + # error: Argument "dtype" to "cast_scalar_to_array" has + # incompatible type "Union[ExtensionDtype, str, dtype, + # Type[object]]"; expected "Union[dtype, ExtensionDtype, + # None]" + (len(index), len(columns)), + data, + dtype=dtype, # type: ignore[arg-type] ) mgr = init_ndarray( @@ -5424,8 +5445,9 @@ def sort_values( # type: ignore[override] if key is not None: # error: List comprehension has incompatible type List[Series]; # expected List[ndarray] - keys = [ # type: ignore[misc] - Series(k, name=name) for (k, name) in zip(keys, by) + keys = [ + Series(k, name=name) # type: ignore[misc] + for (k, name) in zip(keys, by) ] indexer = lexsort_indexer( @@ -5442,7 +5464,7 @@ def sort_values( # type: 
ignore[override] if key is not None: # error: Incompatible types in assignment (expression has type # "Series", variable has type "ndarray") - k = Series(k, name=by) # type[assignment] + k = Series(k, name=by) # type: ignore[assignment] if isinstance(ascending, (tuple, list)): ascending = ascending[0] diff --git a/pandas/core/groupby/generic.py b/pandas/core/groupby/generic.py index db36ecc09189c..e448bd573fe54 100644 --- a/pandas/core/groupby/generic.py +++ b/pandas/core/groupby/generic.py @@ -724,7 +724,15 @@ def value_counts( if is_interval_dtype(lab.dtype): # TODO: should we do this inside II? - sorter = np.lexsort((lab.left, lab.right, ids)) + + # pandas\core\groupby\generic.py:727: error: "ndarray" has no + # attribute "left" [attr-defined] + + # pandas\core\groupby\generic.py:727: error: "ndarray" has no + # attribute "right" [attr-defined] + sorter = np.lexsort( + (lab.left, lab.right, ids) # type: ignore[attr-defined] + ) else: sorter = np.lexsort((lab, ids)) @@ -747,9 +755,9 @@ def value_counts( codes = [rep(level_codes) for level_codes in codes] + [llab(lab, inc)] # error: List item 0 has incompatible type "Union[ndarray, Any]"; # expected "Index" - levels = [ # type: ignore[list-item] - ping.group_index for ping in self.grouper.groupings - ] + [lev] + levels = [ping.group_index for ping in self.grouper.groupings] + [ + lev # type: ignore[list-item] + ] names = self.grouper.names + [self._selection_name] if dropna: diff --git a/pandas/core/groupby/grouper.py b/pandas/core/groupby/grouper.py index a509acb3604e1..8fa84d06b764b 100644 --- a/pandas/core/groupby/grouper.py +++ b/pandas/core/groupby/grouper.py @@ -570,7 +570,9 @@ def indices(self): def codes(self) -> np.ndarray: if self._codes is None: self._make_codes() - return self._codes + # error: Incompatible return value type (got "Optional[ndarray]", + # expected "ndarray") + return self._codes # type: ignore[return-value] @cache_readonly def result_index(self) -> Index: diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 2eb9a932240d9..96006bd522ec6 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -901,7 +901,12 @@ def reduction( result = getattr(values, meth)(axis) result = _wrap_results(result, dtype, fill_value) - return _maybe_null_out(result, axis, mask, values.shape) + # error: Incompatible return value type (got "float", expected + # "Union[ExtensionDtype, str, dtype, Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object]]") + return _maybe_null_out( # type: ignore[return-value] + result, axis, mask, values.shape + ) return reduction diff --git a/pandas/core/reshape/merge.py b/pandas/core/reshape/merge.py index adc69339fb293..510eb007e29be 100644 --- a/pandas/core/reshape/merge.py +++ b/pandas/core/reshape/merge.py @@ -1325,7 +1325,8 @@ def get_join_indexers( for n in range(len(left_keys)) ) zipped = zip(*mapped) - llab, rlab, shape = [list(x) for x in zipped] + # error: No overload variant of "list" matches argument type "object" + llab, rlab, shape = [list(x) for x in zipped] # type: ignore[call-overload] # get flat i8 keys from label lists lkey, rkey = _get_join_keys(llab, rlab, shape, sort) @@ -1796,7 +1797,8 @@ def _get_multiindex_indexer(join_keys, index: MultiIndex, sort: bool): for n in range(index.nlevels) ) zipped = zip(*mapped) - rcodes, lcodes, shape = [list(x) for x in zipped] + # error: No overload variant of "list" matches argument type "object" + rcodes, lcodes, shape = [list(x) for x in zipped] # type: ignore[call-overload] if sort: rcodes = 
list(map(np.take, rcodes, index.codes)) else: @@ -1964,7 +1966,13 @@ def _factorize_keys( rk = ensure_int64(rk) elif is_extension_array_dtype(lk.dtype) and is_dtype_equal(lk.dtype, rk.dtype): - lk, _ = lk._values_for_factorize() + # pandas\core\reshape\merge.py:1967: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type + # "ExtensionArray") [assignment] + + # pandas\core\reshape\merge.py:1967: error: "ndarray" has no attribute + # "_values_for_factorize" [attr-defined] + lk, _ = lk._values_for_factorize() # type: ignore[attr-defined,assignment] # error: Incompatible types in assignment (expression has type # "ndarray", variable has type "ExtensionArray") diff --git a/pandas/core/reshape/reshape.py b/pandas/core/reshape/reshape.py index 18ebe14763797..55e4b4fef17bf 100644 --- a/pandas/core/reshape/reshape.py +++ b/pandas/core/reshape/reshape.py @@ -160,7 +160,9 @@ def _make_selectors(self): self.full_shape = ngroups, stride selector = self.sorted_labels[-1] + stride * comp_index + self.lift - mask = np.zeros(np.prod(self.full_shape), dtype=bool) + # error: Argument 1 to "zeros" has incompatible type "number"; expected + # "Union[int, Sequence[int]]" + mask = np.zeros(np.prod(self.full_shape), dtype=bool) # type: ignore[arg-type] mask.put(selector, True) if mask.sum() < len(self.index): diff --git a/pandas/core/series.py b/pandas/core/series.py index 905cfeb91cec9..342052a2ccef8 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -1099,7 +1099,10 @@ def _set_value(self, label, value, takeable: bool = False): self._values[label] = value else: loc = self.index.get_loc(label) - validate_numeric_casting(self.dtype, value) + # error: Argument 1 to "validate_numeric_casting" has + # incompatible type "Union[dtype, ExtensionDtype]"; expected + # "dtype" + validate_numeric_casting(self.dtype, value) # type: ignore[arg-type] self._values[loc] = value except KeyError: diff --git a/pandas/io/stata.py b/pandas/io/stata.py index 8972dd60d043b..18b2e051a5520 100644 --- a/pandas/io/stata.py +++ b/pandas/io/stata.py @@ -651,7 +651,9 @@ def __init__(self, catarray: Series, encoding: str = "latin-1"): # error: Incompatible types in assignment (expression has type # "ndarray", variable has type "List[int]") self.off = np.array(self.off, dtype=np.int32) # type: ignore[assignment] - self.val = np.array(self.val, dtype=np.int32) + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "List[int]") [assignment] + self.val = np.array(self.val, dtype=np.int32) # type: ignore[assignment] # Total length self.len = 4 + 4 + 4 * self.n + 4 * self.n + self.text_len From 8423bf4702c8a29daa3783a925357ca53260e774 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Tue, 6 Oct 2020 15:02:37 +0100 Subject: [PATCH 22/86] updates --- pandas/compat/numpy/__init__.py | 4 +- pandas/compat/numpy/function.py | 4 +- pandas/core/algorithms.py | 13 +++- pandas/core/arrays/_ranges.py | 9 ++- pandas/core/arrays/base.py | 8 +- pandas/core/arrays/datetimes.py | 10 ++- pandas/core/arrays/period.py | 13 +++- pandas/core/frame.py | 6 +- pandas/core/indexes/multi.py | 6 +- pandas/core/internals/concat.py | 5 +- pandas/core/nanops.py | 110 ++++++++++++++++++++++++++-- pandas/core/sorting.py | 16 +++- pandas/core/strings/object_array.py | 5 +- pandas/core/util/hashing.py | 42 ++++++++++- pandas/core/window/ewm.py | 40 +++++++++- pandas/io/formats/format.py | 53 +++----------- pandas/io/pytables.py | 10 ++- pandas/plotting/_matplotlib/misc.py | 39 
++++++++-- pandas/util/_test_decorators.py | 6 +- 19 files changed, 320 insertions(+), 79 deletions(-) diff --git a/pandas/compat/numpy/__init__.py b/pandas/compat/numpy/__init__.py index 46075a0672c0f..4387492cd9419 100644 --- a/pandas/compat/numpy/__init__.py +++ b/pandas/compat/numpy/__init__.py @@ -6,7 +6,9 @@ import numpy as np # numpy versioning -_np_version = np.__version__ +# pandas\compat\numpy\__init__.py:9: error: Module has no attribute +# "__version__"; maybe "version"? [attr-defined] +_np_version = np.__version__ # type: ignore[attr-defined] _nlv = LooseVersion(_np_version) np_version_under1p17 = _nlv < LooseVersion("1.17") np_version_under1p18 = _nlv < LooseVersion("1.18") diff --git a/pandas/compat/numpy/function.py b/pandas/compat/numpy/function.py index 938f57f504b04..eaa63e39e2034 100644 --- a/pandas/compat/numpy/function.py +++ b/pandas/compat/numpy/function.py @@ -20,7 +20,9 @@ from distutils.version import LooseVersion from typing import Any, Dict, Optional, Union -from numpy import __version__, ndarray +# pandas\compat\numpy\function.py:23: error: Module 'numpy' has no attribute +# '__version__'; maybe "version"? [attr-defined] +from numpy import __version__, ndarray # type: ignore[attr-defined] from pandas._libs.lib import is_bool, is_integer from pandas.errors import UnsupportedFunctionCall diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index 71d352cb6b48c..028d59f426646 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -1266,7 +1266,11 @@ def compute(self, method: str) -> Series: elif is_bool_dtype(pandas_dtype): # GH 26154: ensure False is smaller than True - arr = 1 - (-arr) + + # pandas\core\algorithms.py:1269: error: Incompatible types in + # assignment (expression has type "Union[ndarray, generic]", + # variable has type "ndarray") [assignment] + arr = 1 - (-arr) # type: ignore[assignment] if self.keep == "last": arr = arr[::-1] @@ -2118,7 +2122,12 @@ def safe_sort( if not isinstance(values, np.ndarray) and not is_extension_array_dtype(values): # don't convert to string types dtype, _ = infer_dtype_from_array(values) - values = np.asarray(values, dtype=dtype) + # pandas\core\algorithms.py:2121: error: Argument "dtype" to "asarray" + # has incompatible type "Union[dtype, ExtensionDtype]"; expected + # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, + # Tuple[Any, Any]]" [arg-type] + values = np.asarray(values, dtype=dtype) # type: ignore[arg-type] def sort_mixed(values): # order ints before strings, safe in py3 diff --git a/pandas/core/arrays/_ranges.py b/pandas/core/arrays/_ranges.py index 14b442bf71080..bf62cafa39467 100644 --- a/pandas/core/arrays/_ranges.py +++ b/pandas/core/arrays/_ranges.py @@ -150,7 +150,9 @@ def _generate_range_overflow_safe_signed( addend = np.int64(periods) * np.int64(stride) try: # easy case with no overflows - return np.int64(endpoint) + addend + # pandas\core\arrays\_ranges.py:153: error: Incompatible return + # value type (got "signedinteger", expected "int") [return-value] + return np.int64(endpoint) + addend # type: ignore[return-value] except (FloatingPointError, OverflowError): # with endpoint negative and addend positive we risk # FloatingPointError; with reversed signed we risk OverflowError @@ -168,7 +170,10 @@ def _generate_range_overflow_safe_signed( i64max = np.uint64(np.iinfo(np.int64).max) assert result > i64max if result <= i64max + np.uint64(stride): - return result + # 
pandas\core\arrays\_ranges.py:171: error: Incompatible return + # value type (got "unsignedinteger", expected "int") + # [return-value] + return result # type: ignore[return-value] raise OutOfBoundsDatetime( f"Cannot generate range with {side}={endpoint} and periods={periods}" diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py index e0c5ede7b4884..3de6e073fcc4a 100644 --- a/pandas/core/arrays/base.py +++ b/pandas/core/arrays/base.py @@ -617,7 +617,9 @@ def fillna(self, value=None, method=None, limit=None): ) value = value[mask] - if mask.any(): + # pandas\core\arrays\base.py:620: error: "ExtensionArray" has no + # attribute "any" [attr-defined] + if mask.any(): # type: ignore[attr-defined] if method is not None: func = get_fill_func(method) new_values = func(self.astype(object), limit=limit, mask=mask) @@ -638,7 +640,9 @@ def dropna(self): ------- valid : ExtensionArray """ - return self[~self.isna()] + # pandas\core\arrays\base.py:641: error: Unsupported operand type for ~ + # ("ExtensionArray") [operator] + return self[~self.isna()] # type: ignore[operator] def shift(self, periods: int = 1, fill_value: object = None) -> "ExtensionArray": """ diff --git a/pandas/core/arrays/datetimes.py b/pandas/core/arrays/datetimes.py index 277556e750407..f267c8369e31c 100644 --- a/pandas/core/arrays/datetimes.py +++ b/pandas/core/arrays/datetimes.py @@ -1842,7 +1842,15 @@ def to_julian_date(self): month[testarr] += 12 return ( day - + np.fix((153 * month - 457) / 5) + # pandas\core\arrays\datetimes.py:1845: error: Unsupported operand + # types for / ("generic" and "int") [operator] + # pandas\core\arrays\datetimes.py:1845: error: Unsupported operand + # types for - ("generic" and "int") [operator] + # pandas\core\arrays\datetimes.py:1845: note: Left operand is of + # type "Union[ndarray, generic]" + # pandas\core\arrays\datetimes.py:1845: note: Left operand is of + # type "Union[ndarray, generic, int]" + + np.fix((153 * month - 457) / 5) # type: ignore[operator] + 365 * year + np.floor(year / 4) - np.floor(year / 100) diff --git a/pandas/core/arrays/period.py b/pandas/core/arrays/period.py index 2a1e176665971..7f799a488fc79 100644 --- a/pandas/core/arrays/period.py +++ b/pandas/core/arrays/period.py @@ -890,7 +890,18 @@ def period_array( if not isinstance(data, (np.ndarray, list, tuple, ABCSeries)): data = list(data) - data = np.asarray(data) + # pandas\core\arrays\period.py:893: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type + # "Union[Sequence[Optional[Any]], ExtensionArray]") [assignment] + + # pandas\core\arrays\period.py:893: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type + # "Union[Sequence[Optional[Any]], Index]") [assignment] + + # pandas\core\arrays\period.py:893: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type + # "Union[Sequence[Optional[Any]], Series]") [assignment] + data = np.asarray(data) # type: ignore[assignment] dtype: Optional[PeriodDtype] if freq: diff --git a/pandas/core/frame.py b/pandas/core/frame.py index c0709c04c9a12..2995bf2e744f1 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -4008,7 +4008,11 @@ def lookup(self, row_labels, col_labels) -> np.ndarray: if (cidx == -1).any(): raise KeyError("One or more column labels was not found") flat_index = ridx * len(self.columns) + cidx - result = values.flat[flat_index] + # pandas\core\frame.py:4011: error: Invalid index type + # "Union[ndarray, generic]" for 
"flatiter[ndarray]"; expected type + # "Union[int, integer, Sequence[Union[int, integer]], + # Sequence[Sequence[Any]], ndarray, slice, ellipsis]" [index] + result = values.flat[flat_index] # type: ignore [index] else: result = np.empty(n, dtype="O") for i, (r, c) in enumerate(zip(row_labels, col_labels)): diff --git a/pandas/core/indexes/multi.py b/pandas/core/indexes/multi.py index a157fdfdde447..9c033d602f619 100644 --- a/pandas/core/indexes/multi.py +++ b/pandas/core/indexes/multi.py @@ -3233,7 +3233,11 @@ def _reorder_indexer( # Set order as given in the indexer list level_indexer = self.levels[i].get_indexer(k) level_indexer = level_indexer[level_indexer >= 0] # Filter absent keys - key_order_map[level_indexer] = np.arange(len(level_indexer)) + # pandas\core\indexes\multi.py:3236: error: Unsupported target + # for indexed assignment ("Union[ndarray, generic]") [index] + key_order_map[level_indexer] = np.arange( # type: ignore[index] + len(level_indexer) + ) new_order = key_order_map[self.codes[i][indexer]] else: diff --git a/pandas/core/internals/concat.py b/pandas/core/internals/concat.py index f4f2a1bbdd4a3..718709d5b74a3 100644 --- a/pandas/core/internals/concat.py +++ b/pandas/core/internals/concat.py @@ -377,7 +377,10 @@ def _get_empty_dtype_and_na(join_units: Sequence[JoinUnit]) -> Tuple[DtypeObj, A else: dtypes[i] = unit.dtype - upcast_classes = _get_upcast_classes(join_units, dtypes) + # pandas\core\internals\concat.py:380: error: Argument 2 to + # "_get_upcast_classes" has incompatible type "List[None]"; expected + # "Sequence[Union[dtype, ExtensionDtype]]" [arg-type] + upcast_classes = _get_upcast_classes(join_units, dtypes) # type: ignore[arg-type] # TODO: de-duplicate with maybe_promote? # create the result diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 96006bd522ec6..0147e508e53b6 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -699,7 +699,14 @@ def _get_counts_nanvar( """ dtype = get_dtype(dtype) count = _get_counts(value_counts, mask, axis, dtype=dtype) - d = count - dtype.type(ddof) + # pandas\core\nanops.py:702: error: Unsupported operand types for - ("int" + # and "generic") [operator] + + # pandas\core\nanops.py:702: error: Unsupported operand types for - + # ("float" and "generic") [operator] + + # pandas\core\nanops.py:702: note: Both left and right operands are unions + d = count - dtype.type(ddof) # type: ignore[operator] # always return NaN, never inf if is_scalar(count): @@ -875,7 +882,14 @@ def nansem( ) var = nanvar(values, axis, skipna, ddof=ddof) - return np.sqrt(var) / np.sqrt(count) + # pandas\core\nanops.py:878: error: Unsupported left operand type for / + # ("generic") [operator] + + # pandas\core\nanops.py:878: note: Both left and right operands are unions + + # pandas\core\nanops.py:878: error: Incompatible return value type (got + # "Union[ndarray, generic, Any]", expected "float") [return-value] + return np.sqrt(var) / np.sqrt(count) # type: ignore[operator,return-value] def _nanminmax(meth, fill_value_typ): @@ -1072,7 +1086,32 @@ def nanskew( m3 = _zero_out_fperr(m3) with np.errstate(invalid="ignore", divide="ignore"): - result = (count * (count - 1) ** 0.5 / (count - 2)) * (m3 / m2 ** 1.5) + # pandas\core\nanops.py:1075: error: Unsupported operand types for * + # ("int" and "generic") [operator] + + # pandas\core\nanops.py:1075: error: Unsupported operand types for * + # ("float" and "generic") [operator] + + # pandas\core\nanops.py:1075: note: Both left and right operands are + # unions + + # 
pandas\core\nanops.py:1075: error: Unsupported operand types for / + # ("float" and "generic") [operator] + + # pandas\core\nanops.py:1075: error: Unsupported operand types for / + # ("generic" and "float") [operator] + + # pandas\core\nanops.py:1075: error: Unsupported left operand type for + # / ("generic") [operator] + + # pandas\core\nanops.py:1075: error: Unsupported operand types for ** + # ("generic" and "float") [operator] + + # pandas\core\nanops.py:1075: note: Left operand is of type + # "Union[float, ndarray, generic]" + result = ( + count * (count - 1) ** 0.5 / (count - 2) # type: ignore[operator] + ) * (m3 / m2 ** 1.5) dtype = values.dtype if is_float_dtype(dtype): @@ -1151,9 +1190,68 @@ def nankurt( m4 = adjusted4.sum(axis, dtype=np.float64) with np.errstate(invalid="ignore", divide="ignore"): - adj = 3 * (count - 1) ** 2 / ((count - 2) * (count - 3)) - numer = count * (count + 1) * (count - 1) * m4 - denom = (count - 2) * (count - 3) * m2 ** 2 + # pandas\core\nanops.py:1154: error: Unsupported operand types for * + # ("int" and "generic") [operator] + + # pandas\core\nanops.py:1154: note: Right operand is of type + # "Union[float, ndarray, generic, Any]" + + # pandas\core\nanops.py:1154: error: Unsupported operand types for / + # ("float" and "generic") [operator] + + # pandas\core\nanops.py:1154: error: Unsupported operand types for / + # ("generic" and "float") [operator] + + # pandas\core\nanops.py:1154: error: Unsupported left operand type for + # / ("generic") [operator] + + # pandas\core\nanops.py:1154: note: Both left and right operands are + # unions + + # pandas\core\nanops.py:1154: error: Unsupported operand types for ** + # ("generic" and "int") [operator] + + # pandas\core\nanops.py:1154: note: Left operand is of type + # "Union[float, ndarray, generic]" + + # pandas\core\nanops.py:1154: error: Unsupported operand types for * + # ("float" and "generic") [operator] + + # pandas\core\nanops.py:1154: error: Unsupported operand types for * + # ("generic" and "float") [operator] + + # pandas\core\nanops.py:1154: error: Unsupported left operand type for + # * ("generic") [operator] + adj = ( + 3 * (count - 1) ** 2 / ((count - 2) * (count - 3)) # type: ignore[operator] + ) + # pandas\core\nanops.py:1155: error: Unsupported operand types for * + # ("int" and "generic") [operator] + + # pandas\core\nanops.py:1155: error: Unsupported operand types for * + # ("float" and "generic") [operator] + + # pandas\core\nanops.py:1155: note: Both left and right operands are + # unions + + # pandas\core\nanops.py:1155: error: Unsupported operand types for * + # ("generic" and "float") [operator] + + # pandas\core\nanops.py:1155: error: Unsupported left operand type for + # * ("generic") [operator] + numer = count * (count + 1) * (count - 1) * m4 # type: ignore[operator] + # pandas\core\nanops.py:1156: error: Unsupported operand types for * + # ("float" and "generic") [operator] + + # pandas\core\nanops.py:1156: error: Unsupported operand types for * + # ("generic" and "float") [operator] + + # pandas\core\nanops.py:1156: error: Unsupported left operand type for + # * ("generic") [operator] + + # pandas\core\nanops.py:1156: note: Both left and right operands are + # unions + denom = (count - 2) * (count - 3) * m2 ** 2 # type: ignore[operator] # floating point error # diff --git a/pandas/core/sorting.py b/pandas/core/sorting.py index e2e90b4ed3648..d3262a4ef8196 100644 --- a/pandas/core/sorting.py +++ b/pandas/core/sorting.py @@ -310,7 +310,12 @@ def lexsort_indexer( codes += 1 else: # 
not order means descending if na_position == "last": - codes = np.where(mask, n, n - codes - 1) + # pandas\core\sorting.py:313: error: Unsupported operand types + # for - ("generic" and "int") [operator] + + # pandas\core\sorting.py:313: note: Left operand is of type + # "Union[ndarray, generic]" + codes = np.where(mask, n, n - codes - 1) # type: ignore[operator] elif na_position == "first": codes = np.where(mask, 0, n - codes) if mask.any(): @@ -563,7 +568,14 @@ def get_group_index_sorter(group_index, ngroups: int): count = len(group_index) alpha = 0.0 # taking complexities literally; there may be beta = 1.0 # some room for fine-tuning these parameters - do_groupsort = count > 0 and ((alpha + beta * ngroups) < (count * np.log(count))) + # pandas\core\sorting.py:566: error: Unsupported operand types for * ("int" + # and "generic") [operator] + + # pandas\core\sorting.py:566: note: Right operand is of type + # "Union[ndarray, generic]" + do_groupsort = count > 0 and ( + (alpha + beta * ngroups) < (count * np.log(count)) # type: ignore[operator] + ) if do_groupsort: sorter, _ = algos.groupsort_indexer(ensure_int64(group_index), ngroups) return ensure_platform_int(sorter) diff --git a/pandas/core/strings/object_array.py b/pandas/core/strings/object_array.py index 2eb1a8917ed96..d1e05a810a4e8 100644 --- a/pandas/core/strings/object_array.py +++ b/pandas/core/strings/object_array.py @@ -56,7 +56,10 @@ def _str_map(self, f, na_value=None, dtype=None): return np.ndarray(0, dtype=dtype) # type: ignore[arg-type] if not isinstance(arr, np.ndarray): - arr = np.asarray(arr, dtype=object) + # pandas\core\strings\object_array.py:59: error: Incompatible types + # in assignment (expression has type "ndarray", variable has type + # "ObjectStringArrayMixin") [assignment] + arr = np.asarray(arr, dtype=object) # type: ignore[assignment] mask = isna(arr) convert = not np.all(mask) try: diff --git a/pandas/core/util/hashing.py b/pandas/core/util/hashing.py index df082c7285ae8..a57bf07e0f13f 100644 --- a/pandas/core/util/hashing.py +++ b/pandas/core/util/hashing.py @@ -45,10 +45,46 @@ def combine_hash_arrays(arrays, num_items: int): for i, a in enumerate(arrays): inverse_i = num_items - i out ^= a - out *= mult - mult += np.uint64(82520 + inverse_i + inverse_i) + # pandas\core\util\hashing.py:48: error: No overload variant of + # "__call__" of "_UnsignedIntOp" matches argument type "generic" + # [call-overload] + + # pandas\core\util\hashing.py:48: note: Possible overload variants: + + # pandas\core\util\hashing.py:48: note: def __call__(self, + # Union[bool, unsignedinteger]) -> unsignedinteger + + # pandas\core\util\hashing.py:48: note: def __call__(self, + # Union[int, signedinteger]) -> Union[signedinteger, float64] + + # pandas\core\util\hashing.py:48: note: <2 more similar overloads + # not shown, out of 4 total overloads> + + # pandas\core\util\hashing.py:48: note: Left operand is of type + # "Union[ndarray, generic]" + out *= mult # type: ignore[call-overload] + # pandas\core\util\hashing.py:49: error: Incompatible types in + # assignment (expression has type "unsignedinteger", variable has type + # "uint64") [assignment] + mult += np.uint64(82520 + inverse_i + inverse_i) # type: ignore[assignment] assert i + 1 == num_items, "Fed in wrong num_items" - out += np.uint64(97531) + # pandas\core\util\hashing.py:51: error: No overload variant of "__call__" + # of "_UnsignedIntOp" matches argument type "generic" [call-overload] + + # pandas\core\util\hashing.py:51: note: Possible overload variants: + + # 
pandas\core\util\hashing.py:51: note: def __call__(self, Union[bool, + # unsignedinteger]) -> unsignedinteger + + # pandas\core\util\hashing.py:51: note: def __call__(self, Union[int, + # signedinteger]) -> Union[signedinteger, float64] + + # pandas\core\util\hashing.py:51: note: <2 more similar overloads not + # shown, out of 4 total overloads> + + # pandas\core\util\hashing.py:51: note: Left operand is of type + # "Union[ndarray, generic]" + out += np.uint64(97531) # type: ignore[call-overload] return out diff --git a/pandas/core/window/ewm.py b/pandas/core/window/ewm.py index a054fd71f4b1f..9f7bdb0640f7b 100644 --- a/pandas/core/window/ewm.py +++ b/pandas/core/window/ewm.py @@ -48,8 +48,34 @@ def get_center_of_mass( elif halflife is not None: if halflife <= 0: raise ValueError("halflife must satisfy: halflife > 0") - decay = 1 - np.exp(np.log(0.5) / halflife) - comass = 1 / decay - 1 + # pandas\core\window\ewm.py:51: error: Unsupported operand types for - + # ("int" and "generic") [operator] + + # pandas\core\window\ewm.py:51: note: Right operand is of type + # "Union[ndarray, generic]" + + # pandas\core\window\ewm.py:51: error: Unsupported operand types for / + # ("generic" and "float") [operator] + + # pandas\core\window\ewm.py:51: note: Left operand is of type + # "Union[ndarray, generic]" + decay = 1 - np.exp(np.log(0.5) / halflife) # type: ignore[operator] + # pandas\core\window\ewm.py:52: error: Unsupported operand types for / + # ("int" and "generic") [operator] + + # pandas\core\window\ewm.py:52: note: Right operand is of type + # "Union[ndarray, generic, int]" + + # pandas\core\window\ewm.py:52: error: Unsupported operand types for - + # ("generic" and "int") [operator] + + # pandas\core\window\ewm.py:52: error: Incompatible types in assignment + # (expression has type "Union[ndarray, generic, float]", variable has + # type "Optional[float]") [assignment] + + # pandas\core\window\ewm.py:52: note: Left operand is of type + # "Union[ndarray, generic, float]" + comass = 1 / decay - 1 # type: ignore[operator, assignment] elif alpha is not None: if alpha <= 0 or alpha > 1: raise ValueError("alpha must satisfy: 0 < alpha <= 1") @@ -57,7 +83,10 @@ def get_center_of_mass( else: raise ValueError("Must pass one of comass, span, halflife, or alpha") - return float(comass) + # pandas\core\window\ewm.py:60: error: Argument 1 to "float" has + # incompatible type "Optional[float]"; expected "Union[SupportsFloat, + # _SupportsIndex, str, bytes, bytearray]" [arg-type] + return float(comass) # type: ignore[arg-type] class ExponentialMovingWindow(BaseWindow): @@ -249,7 +278,10 @@ def __init__( "halflife can only be a timedelta convertible argument if " "times is not None." 
) - self.times = None + # pandas\core\window\ewm.py:252: error: Incompatible types in + # assignment (expression has type "None", variable has type + # "ndarray") [assignment] + self.times = None # type: ignore[assignment] self.halflife = None # error: Argument 3 to "get_center_of_mass" has incompatible type # "Union[float, Any, None, timedelta64, int64]"; expected diff --git a/pandas/io/formats/format.py b/pandas/io/formats/format.py index d23b693925aac..0b670785aeeb9 100644 --- a/pandas/io/formats/format.py +++ b/pandas/io/formats/format.py @@ -1648,44 +1648,21 @@ def format_percentiles( with np.errstate(invalid="ignore"): if ( not is_numeric_dtype(percentiles) - # pandas\io\formats\format.py:1649: error: Unsupported operand - # types for >= ("List[Union[int, float]]" and "int") [operator] - # pandas\io\formats\format.py:1649: error: Unsupported operand - # types for >= ("List[float]" and "int") [operator] - # pandas\io\formats\format.py:1649: error: Unsupported operand - # types for >= ("List[Union[str, float]]" and "int") [operator] - or not np.all(percentiles >= 0) # type: ignore[operator] - # pandas\io\formats\format.py:1650: error: Unsupported operand - # types for <= ("List[Union[int, float]]" and "int") [operator] - # pandas\io\formats\format.py:1650: error: Unsupported operand - # types for <= ("List[float]" and "int") [operator] - # pandas\io\formats\format.py:1650: error: Unsupported operand - # types for <= ("List[Union[str, float]]" and "int") [operator] - or not np.all(percentiles <= 1) # type: ignore[operator] + or not np.all(percentiles >= 0) + or not np.all(percentiles <= 1) ): raise ValueError("percentiles should all be in the interval [0,1]") - percentiles = 100 * percentiles - # pandas\io\formats\format.py:1669: error: Item "List[Union[int, float]]" - # of "Union[Any, List[Union[int, float]], List[Union[str, float]]]" has no - # attribute "astype" [union-attr] + # pandas\io\formats\format.py:1668: error: Incompatible types in assignment + # (expression has type "Union[ndarray, generic]", variable has type + # "Union[ndarray, List[Union[int, float]], List[float], List[Union[str, + # float]]]") [assignment] + percentiles = 100 * percentiles # type: ignore[assignment] - # pandas\io\formats\format.py:1669: error: Item "List[Union[str, float]]" - # of "Union[Any, List[Union[int, float]], List[Union[str, float]]]" has no - # attribute "astype" [union-attr] - int_idx = np.isclose( - percentiles.astype(int), percentiles # type: ignore[union-attr] - ) + int_idx = np.isclose(percentiles.astype(int), percentiles) if np.all(int_idx): - # pandas\io\formats\format.py:1672: error: Item "List[Union[int, - # float]]" of "Union[Any, List[Union[int, float]], List[Union[str, - # float]]]" has no attribute "astype" [union-attr] - - # pandas\io\formats\format.py:1672: error: Item "List[Union[str, - # float]]" of "Union[Any, List[Union[int, float]], List[Union[str, - # float]]]" has no attribute "astype" [union-attr] - out = percentiles.astype(int).astype(str) # type: ignore[union-attr] + out = percentiles.astype(int).astype(str) return [i + "%" for i in out] unique_pcts = np.unique(percentiles) @@ -1700,16 +1677,8 @@ def format_percentiles( # ndarray, generic]", variable has type "Union[ndarray, generic]") prec = max(1, prec) # type: ignore[assignment] out = np.empty_like(percentiles, dtype=object) - # error: No overload variant of "__getitem__" of "list" matches argument - # type "Union[bool_, ndarray]" - out[int_idx] = ( - percentiles[int_idx].astype(int).astype(str) # type: 
ignore[call-overload] - ) - # error: No overload variant of "__getitem__" of "list" matches argument - # type "Union[bool_, ndarray]" - out[~int_idx] = ( - percentiles[~int_idx].round(prec).astype(str) # type: ignore[call-overload] - ) + out[int_idx] = percentiles[int_idx].astype(int).astype(str) + out[~int_idx] = percentiles[~int_idx].round(prec).astype(str) return [i + "%" for i in out] diff --git a/pandas/io/pytables.py b/pandas/io/pytables.py index 4317d67126247..b569c23c28e9c 100644 --- a/pandas/io/pytables.py +++ b/pandas/io/pytables.py @@ -5114,14 +5114,20 @@ def _get_data_and_dtype_name(data: ArrayLike): dtype_name = data.dtype.name.split("[")[0] if data.dtype.kind in ["m", "M"]: - data = np.asarray(data.view("i8")) + # pandas\io\pytables.py:5117: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "ExtensionArray") + # [assignment] + data = np.asarray(data.view("i8")) # type: ignore[assignment] # TODO: we used to reshape for the dt64tz case, but no longer # doing that doesn't seem to break anything. why? elif isinstance(data, PeriodIndex): data = data.asi8 - data = np.asarray(data) + # pandas\io\pytables.py:5124: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "ExtensionArray") + # [assignment] + data = np.asarray(data) # type: ignore[assignment] return data, dtype_name diff --git a/pandas/plotting/_matplotlib/misc.py b/pandas/plotting/_matplotlib/misc.py index a1c62f9fce23c..d8c9181cef9e1 100644 --- a/pandas/plotting/_matplotlib/misc.py +++ b/pandas/plotting/_matplotlib/misc.py @@ -442,7 +442,12 @@ def autocorrelation_plot( if ax is None: ax = plt.gca(xlim=(1, n), ylim=(-1.0, 1.0)) mean = np.mean(data) - c0 = np.sum((data - mean) ** 2) / float(n) + # pandas\plotting\_matplotlib\misc.py:445: error: Unsupported operand types + # for ** ("generic" and "int") [operator] + + # pandas\plotting\_matplotlib\misc.py:445: note: Left operand is of type + # "Union[ndarray, generic]" + c0 = np.sum((data - mean) ** 2) / float(n) # type: ignore[operator] def r(h): return ((data[: n - h] - mean) * (data[h:] - mean)).sum() / float(n) / c0 @@ -451,11 +456,35 @@ def r(h): y = [r(loc) for loc in x] z95 = 1.959963984540054 z99 = 2.5758293035489004 - ax.axhline(y=z99 / np.sqrt(n), linestyle="--", color="grey") - ax.axhline(y=z95 / np.sqrt(n), color="grey") + # pandas\plotting\_matplotlib\misc.py:454: error: Unsupported operand types + # for / ("float" and "generic") [operator] + + # pandas\plotting\_matplotlib\misc.py:454: note: Right operand is of type + # "Union[ndarray, generic]" + ax.axhline( + y=z99 / np.sqrt(n), linestyle="--", color="grey" # type: ignore[operator] + ) + # pandas\plotting\_matplotlib\misc.py:455: error: Unsupported operand types + # for / ("float" and "generic") [operator] + + # pandas\plotting\_matplotlib\misc.py:455: note: Right operand is of type + # "Union[ndarray, generic]" + ax.axhline(y=z95 / np.sqrt(n), color="grey") # type: ignore[operator] ax.axhline(y=0.0, color="black") - ax.axhline(y=-z95 / np.sqrt(n), color="grey") - ax.axhline(y=-z99 / np.sqrt(n), linestyle="--", color="grey") + # pandas\plotting\_matplotlib\misc.py:457: error: Unsupported operand types + # for / ("float" and "generic") [operator] + + # pandas\plotting\_matplotlib\misc.py:457: note: Right operand is of type + # "Union[ndarray, generic]" + ax.axhline(y=-z95 / np.sqrt(n), color="grey") # type: ignore[operator] + # pandas\plotting\_matplotlib\misc.py:458: error: Unsupported operand types + # for / ("float" and 
"generic") [operator] + + # pandas\plotting\_matplotlib\misc.py:458: note: Right operand is of type + # "Union[ndarray, generic]" + ax.axhline( + y=-z99 / np.sqrt(n), linestyle="--", color="grey" # type: ignore[operator] + ) ax.set_xlabel("Lag") ax.set_ylabel("Autocorrelation") ax.plot(x, y, **kwds) diff --git a/pandas/util/_test_decorators.py b/pandas/util/_test_decorators.py index e3b779678c68b..1cdab088d4086 100644 --- a/pandas/util/_test_decorators.py +++ b/pandas/util/_test_decorators.py @@ -190,7 +190,11 @@ def skip_if_np_lt(ver_str: str, *args, reason: Optional[str] = None): if reason is None: reason = f"NumPy {ver_str} or greater required" return pytest.mark.skipif( - np.__version__ < LooseVersion(ver_str), *args, reason=reason + # pandas\util\_test_decorators.py:193: error: Module has no attribute + # "__version__"; maybe "version"? [attr-defined] + np.__version__ < LooseVersion(ver_str), # type: ignore[attr-defined] + *args, + reason=reason, ) From aa3e89a990784b945aa4207eec662ebae73c44a0 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 17 Oct 2020 11:40:40 +0100 Subject: [PATCH 23/86] numpy-1.20.0.dev0+07a54df --- pandas/core/arrays/boolean.py | 5 +- pandas/core/arrays/floating.py | 4 +- pandas/core/arrays/interval.py | 83 +++++++++++-- pandas/core/arrays/masked.py | 10 +- pandas/core/arrays/numpy_.py | 5 +- pandas/core/arrays/sparse/array.py | 12 +- pandas/core/arrays/sparse/dtype.py | 6 +- pandas/core/base.py | 18 ++- pandas/core/construction.py | 20 +++- pandas/core/dtypes/cast.py | 164 ++++++++++++++++++++++---- pandas/core/dtypes/missing.py | 14 ++- pandas/core/indexes/base.py | 31 ++++- pandas/core/indexes/range.py | 4 +- pandas/core/internals/blocks.py | 69 +++++++++-- pandas/core/internals/construction.py | 17 ++- pandas/core/internals/managers.py | 30 ++++- pandas/core/internals/ops.py | 10 +- pandas/core/missing.py | 22 +++- pandas/core/nanops.py | 94 ++++++++++++--- pandas/core/ops/mask_ops.py | 15 ++- pandas/core/ops/missing.py | 5 +- pandas/core/series.py | 23 +++- pandas/core/strings/accessor.py | 6 +- pandas/core/tools/datetimes.py | 7 +- pandas/core/window/expanding.py | 10 +- pandas/core/window/rolling.py | 4 +- pandas/io/formats/format.py | 15 ++- pandas/io/pytables.py | 3 +- pandas/io/sas/sas_xport.py | 6 +- pandas/io/stata.py | 25 ++-- pandas/plotting/_matplotlib/tools.py | 4 +- 31 files changed, 619 insertions(+), 122 deletions(-) diff --git a/pandas/core/arrays/boolean.py b/pandas/core/arrays/boolean.py index 30ca7cdd0d087..84fb4363253d7 100644 --- a/pandas/core/arrays/boolean.py +++ b/pandas/core/arrays/boolean.py @@ -607,7 +607,10 @@ def _logical_method(self, other, op): elif op.__name__ in {"xor", "rxor"}: result, mask = ops.kleene_xor(self._data, other, self._mask, mask) - return BooleanArray(result, mask) + # pandas\core\arrays\boolean.py:610: error: Argument 2 to + # "BooleanArray" has incompatible type "Optional[Any]"; expected + # "ndarray" [arg-type] + return BooleanArray(result, mask) # type: ignore[arg-type] def _cmp_method(self, other, op): from pandas.arrays import FloatingArray, IntegerArray diff --git a/pandas/core/arrays/floating.py b/pandas/core/arrays/floating.py index ae4d6c7f946ea..459c925a82c6c 100644 --- a/pandas/core/arrays/floating.py +++ b/pandas/core/arrays/floating.py @@ -402,7 +402,9 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: # error: Argument 2 to "to_numpy" of "BaseMaskedArray" has incompatible # type "**Dict[str, float]"; expected "bool" data = self.to_numpy(dtype=dtype, **kwargs) # type: 
ignore[arg-type] - return astype_nansafe(data, dtype, copy=False) + # pandas\core\arrays\floating.py:405: error: Incompatible return value + # type (got "ExtensionArray", expected "ndarray") [return-value] + return astype_nansafe(data, dtype, copy=False) # type: ignore[return-value] def _values_for_argsort(self) -> np.ndarray: return self._data diff --git a/pandas/core/arrays/interval.py b/pandas/core/arrays/interval.py index 3092bd295d369..8414ede5c1238 100644 --- a/pandas/core/arrays/interval.py +++ b/pandas/core/arrays/interval.py @@ -1363,8 +1363,12 @@ def _maybe_cast_inputs( copy: bool, dtype: Optional[Dtype], ) -> Tuple["Index", "Index"]: - left = ensure_index(left_orig, copy=copy) - right = ensure_index(right_orig, copy=copy) + # pandas\core\arrays\interval.py:1366: error: Value of type variable + # "AnyArrayLike" of "ensure_index" cannot be "object" [type-var] + left = ensure_index(left_orig, copy=copy) # type: ignore[type-var] + # pandas\core\arrays\interval.py:1367: error: Value of type variable + # "AnyArrayLike" of "ensure_index" cannot be "object" [type-var] + right = ensure_index(right_orig, copy=copy) # type: ignore[type-var] if dtype is not None: # GH#19262: dtype must be an IntervalDtype to override inferred @@ -1419,10 +1423,48 @@ def _get_combined_data( # For dt64/td64 we want DatetimeArray/TimedeltaArray instead of ndarray from pandas.core.ops.array_ops import maybe_upcast_datetimelike_array - left = maybe_upcast_datetimelike_array(left) - left = extract_array(left, extract_numpy=True) - right = maybe_upcast_datetimelike_array(right) - right = extract_array(right, extract_numpy=True) + # pandas\core\arrays\interval.py:1422: error: Value of type variable + # "ArrayLike" of "maybe_upcast_datetimelike_array" cannot be "Union[Index, + # ExtensionArray]" [type-var] + + # pandas\core\arrays\interval.py:1422: error: Value of type variable + # "ArrayLike" of "maybe_upcast_datetimelike_array" cannot be "Union[Index, + # ndarray]" [type-var] + left = maybe_upcast_datetimelike_array(left) # type: ignore[type-var] + # pandas\core\arrays\interval.py:1423: error: Value of type variable + # "AnyArrayLike" of "extract_array" cannot be "Union[Index, + # ExtensionArray]" [type-var] + + # pandas\core\arrays\interval.py:1423: error: Value of type variable + # "AnyArrayLike" of "extract_array" cannot be "Union[Index, ndarray]" + # [type-var] + + # pandas\core\arrays\interval.py:1423: error: Incompatible types in + # assignment (expression has type "ExtensionArray", variable has type + # "Union[Index, ndarray]") [assignment] + left = extract_array(left, extract_numpy=True) # type: ignore[type-var,assignment] + # pandas\core\arrays\interval.py:1424: error: Value of type variable + # "ArrayLike" of "maybe_upcast_datetimelike_array" cannot be "Union[Index, + # ExtensionArray]" [type-var] + + # pandas\core\arrays\interval.py:1424: error: Value of type variable + # "ArrayLike" of "maybe_upcast_datetimelike_array" cannot be "Union[Index, + # ndarray]" [type-var] + right = maybe_upcast_datetimelike_array(right) # type: ignore[type-var] + # pandas\core\arrays\interval.py:1425: error: Value of type variable + # "AnyArrayLike" of "extract_array" cannot be "Union[Index, + # ExtensionArray]" [type-var] + + # pandas\core\arrays\interval.py:1425: error: Value of type variable + # "AnyArrayLike" of "extract_array" cannot be "Union[Index, ndarray]" + # [type-var] + + # pandas\core\arrays\interval.py:1425: error: Incompatible types in + # assignment (expression has type "ExtensionArray", variable has type + 
# "Union[Index, ndarray]") [assignment] + right = extract_array( # type: ignore[type-var,assignment] + right, extract_numpy=True + ) lbase = getattr(left, "_ndarray", left).base rbase = getattr(right, "_ndarray", right).base @@ -1437,10 +1479,31 @@ def _get_combined_data( axis=1, ) else: - left = cast(Union["DatetimeArray", "TimedeltaArray"], left) - right = cast(Union["DatetimeArray", "TimedeltaArray"], right) - combined = type(left)._concat_same_type( - [left.reshape(-1, 1), right.reshape(-1, 1)], + # pandas\core\arrays\interval.py:1440: error: Incompatible types in + # assignment (expression has type "Union[DatetimeArray, + # TimedeltaArray]", variable has type "Union[Index, ndarray]") + # [assignment] + left = cast( # type: ignore[assignment] + Union["DatetimeArray", "TimedeltaArray"], left + ) + # pandas\core\arrays\interval.py:1441: error: Incompatible types in + # assignment (expression has type "Union[DatetimeArray, + # TimedeltaArray]", variable has type "Union[Index, ndarray]") + # [assignment] + right = cast( # type: ignore[assignment] + Union["DatetimeArray", "TimedeltaArray"], right + ) + # pandas\core\arrays\interval.py:1442: error: "Type[Index]" has no + # attribute "_concat_same_type" [attr-defined] + combined = type(left)._concat_same_type( # type: ignore[attr-defined] + # pandas\core\arrays\interval.py:1443: error: "Index" has no + # attribute "reshape"; maybe "shape"? [attr-defined] + # pandas\core\arrays\interval.py:1443: error: Item "Index" of + # "Union[Index, ndarray]" has no attribute "reshape" [union-attr] + [ + left.reshape(-1, 1), # type: ignore[attr-defined] + right.reshape(-1, 1), # type: ignore[union-attr] + ], axis=1, ) return combined diff --git a/pandas/core/arrays/masked.py b/pandas/core/arrays/masked.py index 0863ed1aaec14..7e90cc49dd578 100644 --- a/pandas/core/arrays/masked.py +++ b/pandas/core/arrays/masked.py @@ -138,7 +138,10 @@ def __len__(self) -> int: return len(self._data) def __invert__(self: BaseMaskedArrayT) -> BaseMaskedArrayT: - return type(self)(~self._data, self._mask) + # pandas\core\arrays\masked.py:141: error: Argument 1 to + # "BaseMaskedArray" has incompatible type "Union[ndarray, integer, + # bool_]"; expected "ndarray" [arg-type] + return type(self)(~self._data, self._mask) # type: ignore[arg-type] def to_numpy( self, dtype=None, copy: bool = False, na_value: Scalar = lib.no_default @@ -246,7 +249,10 @@ def _hasna(self) -> bool: # Note: this is expensive right now! The hope is that we can # make this faster by having an optional mask, but not have to change # source code using it.. 
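Several of the narrow ignores in this patch (the _hasna change just below, and nanany/nanall in pandas/core/nanops.py later in the same diff) stem from the development numpy annotations typing reductions such as ndarray.any() as numpy's bool_ rather than the builtin bool. A minimal sketch of that mismatch, using a hypothetical has_na helper rather than code from the patch:

    import numpy as np

    def has_na(mask: np.ndarray) -> bool:
        # ndarray.any() is annotated as returning np.bool_, so handing it back
        # where builtin bool is declared is reported as [return-value]; the
        # patch keeps the expression and adds an ignore scoped to that code.
        return mask.any()  # type: ignore[return-value]

An explicit bool(mask.any()) would satisfy the checker as well; this series consistently opts for the scoped ignore instead.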
- return self._mask.any() + + # pandas\core\arrays\masked.py:249: error: Incompatible return value + # type (got "bool_", expected "bool") [return-value] + return self._mask.any() # type: ignore[return-value] # error: Return type "ndarray" of "isna" incompatible with return type # "ArrayLike" in supertype "ExtensionArray" diff --git a/pandas/core/arrays/numpy_.py b/pandas/core/arrays/numpy_.py index 64718166cb8cb..aff686e81a627 100644 --- a/pandas/core/arrays/numpy_.py +++ b/pandas/core/arrays/numpy_.py @@ -373,7 +373,10 @@ def to_numpy( # Ops def __invert__(self): - return type(self)(~self._ndarray) + # pandas\core\arrays\numpy_.py:376: error: Argument 1 to "PandasArray" + # has incompatible type "Union[ndarray, integer, bool_]"; expected + # "Union[ndarray, PandasArray]" [arg-type] + return type(self)(~self._ndarray) # type: ignore[arg-type] def _cmp_method(self, other, op): if isinstance(other, PandasArray): diff --git a/pandas/core/arrays/sparse/array.py b/pandas/core/arrays/sparse/array.py index 8e51c41a9732f..b25d389cf03d8 100644 --- a/pandas/core/arrays/sparse/array.py +++ b/pandas/core/arrays/sparse/array.py @@ -1071,10 +1071,18 @@ def astype(self, dtype=None, copy=True): # TODO copy=False is broken for astype_nansafe with int -> float, so cannot # passthrough copy keyword: https://github.com/pandas-dev/pandas/issues/34456 sp_values = astype_nansafe(self.sp_values, subtype, copy=True) - if sp_values is self.sp_values and copy: + # pandas\core\arrays\sparse\array.py:1074: error: Non-overlapping + # identity check (left operand type: "ExtensionArray", right operand + # t...ype: "ndarray") [comparison-overlap] + if sp_values is self.sp_values and copy: # type: ignore[comparison-overlap] sp_values = sp_values.copy() - return self._simple_new(sp_values, self.sp_index, dtype) + # pandas\core\arrays\sparse\array.py:1077: error: Argument 1 to + # "_simple_new" of "SparseArray" has incompatible type + # "ExtensionArray"; expected "ndarray" [arg-type] + return self._simple_new( + sp_values, self.sp_index, dtype # type: ignore[arg-type] + ) def map(self, mapper): """ diff --git a/pandas/core/arrays/sparse/dtype.py b/pandas/core/arrays/sparse/dtype.py index c0662911d40da..e62bc8125394a 100644 --- a/pandas/core/arrays/sparse/dtype.py +++ b/pandas/core/arrays/sparse/dtype.py @@ -325,7 +325,11 @@ def update_dtype(self, dtype): if is_extension_array_dtype(dtype): raise TypeError("sparse arrays of extension dtypes not supported") - fill_value = astype_nansafe(np.array(self.fill_value), dtype).item() + # pandas\core\arrays\sparse\dtype.py:328: error: "ExtensionArray" + # has no attribute "item" [attr-defined] + fill_value = astype_nansafe( + np.array(self.fill_value), dtype + ).item() # type: ignore[attr-defined] dtype = cls(dtype, fill_value=fill_value) return dtype diff --git a/pandas/core/base.py b/pandas/core/base.py index 6af537dcd149a..c775a2b32bd2e 100644 --- a/pandas/core/base.py +++ b/pandas/core/base.py @@ -676,7 +676,15 @@ def argmax(self, axis=None, skipna: bool = True, *args, **kwargs) -> int: """ nv.validate_minmax_axis(axis) nv.validate_argmax_with_skipna(skipna, args, kwargs) - return nanops.nanargmax(self._values, skipna=skipna) + # pandas\core\base.py:679: error: Incompatible return value type (got + # "Union[int, ndarray]", expected "int") [return-value] + + # pandas\core\base.py:679: error: Argument 1 to "nanargmax" has + # incompatible type "Union[ExtensionArray, ndarray]"; expected + # "ndarray" [arg-type] + return nanops.nanargmax( # type: ignore[return-value] + self._values, 
skipna=skipna # type: ignore[arg-type] + ) def min(self, axis=None, skipna: bool = True, *args, **kwargs): """ @@ -726,7 +734,13 @@ def min(self, axis=None, skipna: bool = True, *args, **kwargs): def argmin(self, axis=None, skipna=True, *args, **kwargs) -> int: nv.validate_minmax_axis(axis) nv.validate_argmax_with_skipna(skipna, args, kwargs) - return nanops.nanargmin(self._values, skipna=skipna) + # pandas\core\base.py:729: error: Argument 1 to "nanargmin" has + # incompatible type "Union[ExtensionArray, ndarray]"; expected + # "ndarray" [arg-type] + result = nanops.nanargmin(self._values, skipna=skipna) # type: ignore[arg-type] + # pandas\core\base.py:732: error: Incompatible return value type (got + # "Union[int, ndarray]", expected "int") [return-value] + return result # type: ignore[return-value] def tolist(self): """ diff --git a/pandas/core/construction.py b/pandas/core/construction.py index 419ab07608be2..5b532e2648941 100644 --- a/pandas/core/construction.py +++ b/pandas/core/construction.py @@ -482,7 +482,11 @@ def sanitize_array( else: subarr = maybe_convert_platform(data) - subarr = maybe_cast_to_datetime(subarr, dtype) + # pandas\core\construction.py:485: error: Argument 2 to + # "maybe_cast_to_datetime" has incompatible type "Union[dtype, + # ExtensionDtype, None]"; expected "Union[dtype, ExtensionDtype]" + # [arg-type] + subarr = maybe_cast_to_datetime(subarr, dtype) # type: ignore[arg-type] elif isinstance(data, range): # GH#16804 @@ -592,10 +596,20 @@ def _try_cast(arr, dtype: Optional[DtypeObj], copy: bool, raise_cast_failure: bo # that we can convert the data to the requested dtype. if is_integer_dtype(dtype): # this will raise if we have e.g. floats - maybe_cast_to_integer_array(arr, dtype) + + # pandas\core\construction.py:595: error: Argument 2 to + # "maybe_cast_to_integer_array" has incompatible type "Union[dtype, + # ExtensionDtype, None]"; expected "Union[ExtensionDtype, str, + # dtype, Type[str], Type[float], Type[int], Type[complex], + # Type[bool], Type[object]]" [arg-type] + maybe_cast_to_integer_array(arr, dtype) # type: ignore[arg-type] subarr = arr else: - subarr = maybe_cast_to_datetime(arr, dtype) + # pandas\core\construction.py:598: error: Argument 2 to + # "maybe_cast_to_datetime" has incompatible type "Union[dtype, + # ExtensionDtype, None]"; expected "Union[dtype, ExtensionDtype]" + # [arg-type] + subarr = maybe_cast_to_datetime(arr, dtype) # type: ignore[arg-type] # Take care in creating object arrays (but iterators are not # supported): diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index c98d294ed0fc4..00e5c15014628 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -169,30 +169,80 @@ def maybe_downcast_to_dtype(result, dtype: Dtype): dtype = np.dtype(dtype) - converted = maybe_downcast_numeric(result, dtype, do_round) + # pandas\core\dtypes\cast.py:172: error: Argument 2 to + # "maybe_downcast_numeric" has incompatible type "Union[ExtensionDtype, + # dtype, Type[object]]"; expected "Union[dtype, ExtensionDtype]" + # [arg-type] + converted = maybe_downcast_numeric( + result, dtype, do_round # type: ignore[arg-type] + ) if converted is not result: return converted # a datetimelike # GH12821, iNaT is casted to float - if dtype.kind in ["M", "m"] and result.dtype.kind in ["i", "f"]: + + # pandas\core\dtypes\cast.py:178: error: Item "type" of + # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute "kind" + # [union-attr] + if dtype.kind in ["M", "m"] and result.dtype.kind in [ # type: 
ignore[union-attr] + "i", + "f", + ]: if hasattr(dtype, "tz"): # not a numpy dtype - if dtype.tz: + + # pandas\core\dtypes\cast.py:181: error: Item "ExtensionDtype" of + # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute + # "tz" [union-attr] + + # pandas\core\dtypes\cast.py:181: error: Item "dtype" of + # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute + # "tz" [union-attr] + + # pandas\core\dtypes\cast.py:181: error: Item "type" of + # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute + # "tz" [union-attr] + if dtype.tz: # type: ignore # convert to datetime and change timezone from pandas import to_datetime result = to_datetime(result).tz_localize("utc") - result = result.tz_convert(dtype.tz) + # pandas\core\dtypes\cast.py:186: error: Item "ExtensionDtype" + # of "Union[ExtensionDtype, dtype, Type[object]]" has no + # attribute "tz" [union-attr] + + # pandas\core\dtypes\cast.py:186: error: Item "dtype" of + # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute + # "tz" [union-attr] + + # pandas\core\dtypes\cast.py:186: error: Item "type" of + # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute + # "tz" [union-attr] + result = result.tz_convert(dtype.tz) # type: ignore[union-attr] else: result = result.astype(dtype) - elif dtype.type is Period: + # pandas\core\dtypes\cast.py:190: error: Item "type" of + # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute "type" + # [union-attr] + elif dtype.type is Period: # type: ignore[union-attr] # TODO(DatetimeArray): merge with previous elif from pandas.core.arrays import PeriodArray try: - return PeriodArray(result, freq=dtype.freq) + # pandas\core\dtypes\cast.py:195: error: Item "ExtensionDtype" of + # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute + # "freq" [union-attr] + + # pandas\core\dtypes\cast.py:195: error: Item "dtype" of + # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute + # "freq" [union-attr] + + # pandas\core\dtypes\cast.py:195: error: Item "type" of + # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute + # "freq" [union-attr] + return PeriodArray(result, freq=dtype.freq) # type: ignore[union-attr] except TypeError: # e.g. TypeError: int() argument must be a string, a # bytes-like object or a number, not 'Period @@ -498,7 +548,10 @@ def maybe_casted_values( fill_value = na_value_for_dtype(dtype) values = construct_1d_arraylike_from_scalar(fill_value, len(mask), dtype) else: - values = values.take(codes) + # pandas\core\dtypes\cast.py:501: error: Argument 1 to "take" of + # "ExtensionArray" has incompatible type "ndarray"; expected + # "Sequence[int]" [arg-type] + values = values.take(codes) # type: ignore[arg-type] # TODO(https://github.com/pandas-dev/pandas/issues/24206) # Push this into maybe_upcast_putmask? 
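The hunks above and below follow a single convention: the full mypy message (path, line, error code) is copied in as a comment, and the offending statement gets a # type: ignore[...] scoped to that one error code, so a different error appearing later on the same line would still be reported. A minimal sketch of the union-attr case, with a hypothetical dtype_tz helper standing in for the real call sites:

    from typing import Union

    import numpy as np

    from pandas.api.extensions import ExtensionDtype

    def dtype_tz(dtype: Union[np.dtype, ExtensionDtype]):
        # Neither np.dtype nor the ExtensionDtype base declares .tz, and, as in
        # maybe_downcast_to_dtype above, the hasattr() guard does not satisfy
        # mypy: the access is still reported as 'Item "dtype" of "Union[dtype,
        # ExtensionDtype]" has no attribute "tz" [union-attr]'.
        if hasattr(dtype, "tz"):
            return dtype.tz  # type: ignore[union-attr]
        return None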
@@ -522,7 +575,14 @@ def maybe_casted_values( if issubclass(values_type, DatetimeLikeArrayMixin): values = values_type(values, dtype=values_dtype) - return values + # pandas\core\dtypes\cast.py:525: error: Incompatible return value type + # (got "Union[ExtensionArray, ndarray]", expected "ExtensionArray") + # [return-value] + + # pandas\core\dtypes\cast.py:525: error: Incompatible return value type + # (got "Union[ExtensionArray, ndarray]", expected "ndarray") + # [return-value] + return values # type: ignore[return-value] def maybe_promote(dtype, fill_value=np.nan): @@ -843,7 +903,10 @@ def infer_dtype_from_array( (dtype('O'), [1, '1']) """ if isinstance(arr, np.ndarray): - return arr.dtype, arr + # pandas\core\dtypes\cast.py:846: error: Incompatible return value type + # (got "Tuple[dtype, ndarray]", expected "Tuple[Union[dtype, + # ExtensionDtype], ExtensionArray]") [return-value] + return arr.dtype, arr # type: ignore[return-value] if not is_list_like(arr): arr = [arr] @@ -852,7 +915,10 @@ def infer_dtype_from_array( return arr.dtype, arr elif isinstance(arr, ABCSeries): - return arr.dtype, np.asarray(arr) + # pandas\core\dtypes\cast.py:855: error: Incompatible return value type + # (got "Tuple[Any, ndarray]", expected "Tuple[Union[dtype, + # ExtensionDtype], ExtensionArray]") [return-value] + return arr.dtype, np.asarray(arr) # type: ignore[return-value] # don't force numpy coerce with nan's inferred = lib.infer_dtype(arr, skipna=False) @@ -900,7 +966,11 @@ def maybe_infer_dtype_type(element): def maybe_upcast( values: ArrayLike, fill_value: Scalar = np.nan, - dtype: Dtype = None, + # pandas\core\dtypes\cast.py:903: error: Incompatible default for argument + # "dtype" (default has type "None", argument has type + # "Union[ExtensionDtype, str, dtype, Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object]]") [assignment] + dtype: Dtype = None, # type: ignore[assignment] copy: bool = False, ) -> Tuple[ArrayLike, Scalar]: """ @@ -1016,7 +1086,12 @@ def astype_nansafe( """ # dispatch on extension dtype if needed if is_extension_array_dtype(dtype): - return dtype.construct_array_type()._from_sequence(arr, dtype=dtype, copy=copy) + # pandas\core\dtypes\cast.py:1019: error: Item "dtype" of "Union[dtype, + # ExtensionDtype]" has no attribute "construct_array_type" + # [union-attr] + return dtype.construct_array_type()._from_sequence( # type: ignore[union-attr] + arr, dtype=dtype, copy=copy + ) if not isinstance(dtype, np.dtype): dtype = pandas_dtype(dtype) @@ -1062,7 +1137,13 @@ def astype_nansafe( raise TypeError(f"cannot astype a timedelta from [{arr.dtype}] to [{dtype}]") - elif np.issubdtype(arr.dtype, np.floating) and np.issubdtype(dtype, np.integer): + # pandas\core\dtypes\cast.py:1065: error: Argument 1 to "issubdtype" has + # incompatible type "Union[dtype, ExtensionDtype]"; expected "Union[dtype, + # None, type, _SupportsDtype, str, Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, Any]]" [arg-type] + elif np.issubdtype(arr.dtype, np.floating) and np.issubdtype( + dtype, np.integer # type: ignore[arg-type] + ): if not np.isfinite(arr).all(): raise ValueError("Cannot convert non-finite values (NA or inf) to integer") @@ -1079,11 +1160,19 @@ def astype_nansafe( elif is_datetime64_dtype(dtype): from pandas import to_datetime - return astype_nansafe(to_datetime(arr).values, dtype, copy=copy) + # pandas\core\dtypes\cast.py:1082: error: Incompatible return value + # type (got "ExtensionArray", expected "ndarray") 
[return-value] + return astype_nansafe( # type: ignore[return-value] + to_datetime(arr).values, dtype, copy=copy + ) elif is_timedelta64_dtype(dtype): from pandas import to_timedelta - return astype_nansafe(to_timedelta(arr)._values, dtype, copy=copy) + # pandas\core\dtypes\cast.py:1086: error: Incompatible return value + # type (got "ExtensionArray", expected "ndarray") [return-value] + return astype_nansafe( # type: ignore[return-value] + to_timedelta(arr)._values, dtype, copy=copy + ) if dtype.name in ("datetime64", "timedelta64"): msg = ( @@ -1447,7 +1536,10 @@ def maybe_cast_to_datetime(value, dtype: DtypeObj, errors: str = "raise"): # pandas supports dtype whose granularity is less than [ns] # e.g., [ps], [fs], [as] - if dtype <= np.dtype("M8[ns]"): + + # pandas\core\dtypes\cast.py:1450: error: Unsupported operand + # types for >= ("dtype" and "ExtensionDtype") [operator] + if dtype <= np.dtype("M8[ns]"): # type: ignore[operator] if dtype.name == "datetime64": raise ValueError(msg) dtype = DT64NS_DTYPE @@ -1465,7 +1557,10 @@ def maybe_cast_to_datetime(value, dtype: DtypeObj, errors: str = "raise"): # pandas supports dtype whose granularity is less than [ns] # e.g., [ps], [fs], [as] - if dtype <= np.dtype("m8[ns]"): + + # pandas\core\dtypes\cast.py:1468: error: Unsupported operand + # types for >= ("dtype" and "ExtensionDtype") [operator] + if dtype <= np.dtype("m8[ns]"): # type: ignore[operator] if dtype.name == "timedelta64": raise ValueError(msg) dtype = TD64NS_DTYPE @@ -1501,11 +1596,33 @@ def maybe_cast_to_datetime(value, dtype: DtypeObj, errors: str = "raise"): value = to_datetime(value, errors=errors).array if is_dt_string: # Strings here are naive, so directly localize - value = value.tz_localize(dtype.tz) + + # pandas\core\dtypes\cast.py:1504: error: Item + # "dtype" of "Union[dtype, ExtensionDtype]" has + # no attribute "tz" [union-attr] + + # pandas\core\dtypes\cast.py:1504: error: Item + # "ExtensionDtype" of "Union[dtype, + # ExtensionDtype]" has no attribute "tz" + # [union-attr] + value = value.tz_localize( + dtype.tz # type: ignore[union-attr] + ) else: # Numeric values are UTC at this point, # so localize and convert - value = value.tz_localize("UTC").tz_convert(dtype.tz) + + # pandas\core\dtypes\cast.py:1508: error: Item + # "dtype" of "Union[dtype, ExtensionDtype]" has + # no attribute "tz" [union-attr] + + # pandas\core\dtypes\cast.py:1508: error: Item + # "ExtensionDtype" of "Union[dtype, + # ExtensionDtype]" has no attribute "tz" + # [union-attr] + value = value.tz_localize("UTC").tz_convert( + dtype.tz # type: ignore[union-attr] + ) elif is_timedelta64: value = to_timedelta(value, errors=errors)._values except OutOfBoundsDatetime: @@ -1808,7 +1925,14 @@ def maybe_cast_to_integer_array(arr, dtype: Dtype, copy: bool = False): try: if not hasattr(arr, "astype"): - casted = np.array(arr, dtype=dtype, copy=copy) + # pandas\core\dtypes\cast.py:1811: error: Argument "dtype" to + # "array" has incompatible type "Union[ExtensionDtype, str, dtype, + # Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]"; expected "Union[dtype, None, type, + # _SupportsDtype, str, Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, Any]]" + # [arg-type] + casted = np.array(arr, dtype=dtype, copy=copy) # type: ignore[arg-type] else: casted = arr.astype(dtype, copy=copy) except OverflowError as err: diff --git a/pandas/core/dtypes/missing.py b/pandas/core/dtypes/missing.py index b4782227deb82..204a3034e87ce 100644 --- 
a/pandas/core/dtypes/missing.py +++ b/pandas/core/dtypes/missing.py @@ -192,7 +192,9 @@ def _use_inf_as_na(key): inf_as_na = get_option(key) globals()["_isna"] = partial(_isna, inf_as_na=inf_as_na) if inf_as_na: - globals()["nan_checker"] = lambda x: ~np.isfinite(x) + # pandas\core\dtypes\missing.py:195: error: Unsupported operand type + # for ~ ("Union[ndarray, generic]") [operator] + globals()["nan_checker"] = lambda x: ~np.isfinite(x) # type: ignore[operator] globals()["INF_AS_NA"] = True else: globals()["nan_checker"] = np.isnan @@ -230,7 +232,9 @@ def _isna_ndarraylike(obj, inf_as_na: bool = False): result = values.view("i8") == iNaT else: if inf_as_na: - result = ~np.isfinite(values) + # pandas\core\dtypes\missing.py:233: error: Unsupported operand + # type for ~ ("Union[ndarray, generic]") [operator] + result = ~np.isfinite(values) # type: ignore[operator] else: result = np.isnan(values) @@ -455,7 +459,11 @@ def array_equivalent( def _array_equivalent_float(left, right): - return ((left == right) | (np.isnan(left) & np.isnan(right))).all() + # pandas\core\dtypes\missing.py:458: error: Unsupported left operand type + # for & ("generic") [operator] + return ( + (left == right) | (np.isnan(left) & np.isnan(right)) # type: ignore[operator] + ).all() def _array_equivalent_datetimelike(left, right): diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index ff074bf2df1a7..7e5ab972a795c 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -3561,11 +3561,17 @@ def join(self, other, how="left", level=None, return_indexers=False, sort=False) if return_indexers: if join_index is self: - lindexer = None + # pandas\core\indexes\base.py:3564: error: Incompatible types + # in assignment (expression has type "None", variable has type + # "ndarray") [assignment] + lindexer = None # type: ignore[assignment] else: lindexer = self.get_indexer(join_index) if join_index is other: - rindexer = None + # pandas\core\indexes\base.py:3568: error: Incompatible types + # in assignment (expression has type "None", variable has type + # "ndarray") [assignment] + rindexer = None # type: ignore[assignment] else: rindexer = other.get_indexer(join_index) return join_index, lindexer, rindexer @@ -5390,7 +5396,12 @@ def _cmp_method(self, other, op): else: with np.errstate(all="ignore"): - result = ops.comparison_op(self._values, np.asarray(other), op) + # pandas\core\indexes\base.py:5393: error: Value of type + # variable "ArrayLike" of "comparison_op" cannot be + # "Union[ExtensionArray, ndarray]" [type-var] + result = ops.comparison_op( + self._values, np.asarray(other), op # type: ignore[type-var] + ) return result @@ -5800,7 +5811,12 @@ def _maybe_cast_data_without_dtype(subarr): if inferred == "integer": try: - data = _try_convert_to_int_array(subarr, False, None) + # pandas\core\indexes\base.py:5803: error: Argument 3 to + # "_try_convert_to_int_array" has incompatible type "None"; + # expected "dtype" [arg-type] + data = _try_convert_to_int_array( + subarr, False, None # type: ignore[arg-type] + ) return data, data.dtype except ValueError: pass @@ -5833,7 +5849,12 @@ def _maybe_cast_data_without_dtype(subarr): pass elif inferred.startswith("timedelta"): - data = TimedeltaArray._from_sequence(subarr, copy=False) + # pandas\core\indexes\base.py:5836: error: Incompatible types in + # assignment (expression has type "TimedeltaArray", variable has + # type "ndarray") [assignment] + data = TimedeltaArray._from_sequence( # type: ignore[assignment] + subarr, copy=False + ) 
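Many of the [assignment] ignores in this diff (lindexer/rindexer = None above, the TimedeltaArray re-binding of data directly above, and self.times = None in window/ewm.py earlier in the series) come from re-binding a name whose declared or inferred type is ndarray. A minimal sketch with a hypothetical times variable:

    import numpy as np

    times: np.ndarray = np.arange(3)
    # Re-binding to None is 'Incompatible types in assignment (expression has
    # type "None", variable has type "ndarray") [assignment]'; the patch
    # silences it in place rather than widening the annotation to
    # Optional[np.ndarray].
    times = None  # type: ignore[assignment]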
return data, data.dtype elif inferred == "period": try: diff --git a/pandas/core/indexes/range.py b/pandas/core/indexes/range.py index 74ec892d5f8a0..f80ea26fbebe1 100644 --- a/pandas/core/indexes/range.py +++ b/pandas/core/indexes/range.py @@ -864,7 +864,9 @@ def _arith_method(self, other, op): # apply if we have an override if step: with np.errstate(all="ignore"): - rstep = step(left.step, right) + # pandas\core\indexes\range.py:867: error: "bool" not + # callable [operator] + rstep = step(left.step, right) # type: ignore[operator] # we don't have a representable op # so return a base index diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 495a9c8065943..024808d74f6c3 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -229,7 +229,10 @@ def array_values(self) -> ExtensionArray: """ The array that Series.array returns. Always an ExtensionArray. """ - return PandasArray(self.values) + # pandas\core\internals\blocks.py:232: error: Argument 1 to + # "PandasArray" has incompatible type "Union[ndarray, ExtensionArray]"; + # expected "Union[ndarray, PandasArray]" [arg-type] + return PandasArray(self.values) # type: ignore[arg-type] def get_values(self, dtype=None): """ @@ -841,12 +844,21 @@ def comp(s: Scalar, mask: np.ndarray, regex: bool = False) -> np.ndarray: an element-wise regular expression matching """ if isna(s): - return ~mask + # pandas\core\internals\blocks.py:844: error: Incompatible + # return value type (got "Union[ndarray, integer, bool_]", + # expected "ndarray") [return-value] + return ~mask # type: ignore[return-value] s = com.maybe_box_datetimelike(s) # error: Incompatible return value type (got "Union[ndarray, # bool]", expected "ndarray") - tmp = compare_or_regex_search(self.values, s, regex, mask) + + # pandas\core\internals\blocks.py:849: error: Value of type + # variable "ArrayLike" of "compare_or_regex_search" cannot be + # "Union[ndarray, ExtensionArray]" [type-var] + tmp = compare_or_regex_search( + self.values, s, regex, mask # type: ignore[type-var] + ) return tmp # type: ignore[return-value] # Calculate the mask once, prior to the call of comp @@ -912,7 +924,12 @@ def setitem(self, indexer, value): # We only get here for non-Extension Blocks, so _try_coerce_args # is only relevant for DatetimeBlock and TimedeltaBlock if lib.is_scalar(value): - value = convert_scalar_for_putitemlike(value, values.dtype) + # pandas\core\internals\blocks.py:915: error: Argument 2 to + # "convert_scalar_for_putitemlike" has incompatible type + # "Union[dtype, ExtensionDtype]"; expected "dtype" [arg-type] + value = convert_scalar_for_putitemlike( + value, values.dtype # type: ignore[arg-type] + ) else: # current dtype cannot store value, coerce to common dtype @@ -1028,7 +1045,12 @@ def putmask( # We only get here for non-Extension Blocks, so _try_coerce_args # is only relevant for DatetimeBlock and TimedeltaBlock if lib.is_scalar(new): - new = convert_scalar_for_putitemlike(new, self.values.dtype) + # pandas\core\internals\blocks.py:1031: error: Argument 2 to + # "convert_scalar_for_putitemlike" has incompatible type + # "Union[dtype, ExtensionDtype]"; expected "dtype" [arg-type] + new = convert_scalar_for_putitemlike( + new, self.values.dtype # type: ignore[arg-type] + ) if transpose: new_values = new_values.T @@ -1373,9 +1395,20 @@ def shift(self, periods: int, axis: int = 0, fill_value=None): """ shift the block by periods, possibly upcast """ # convert integer to float if necessary. 
need to do a lot more than # that, handle boolean etc also - new_values, fill_value = maybe_upcast(self.values, fill_value) - new_values = shift(new_values, periods, axis, fill_value) + # pandas\core\internals\blocks.py:1376: error: Value of type variable + # "ArrayLike" of "maybe_upcast" cannot be "Union[ndarray, + # ExtensionArray]" [type-var] + new_values, fill_value = maybe_upcast( + self.values, fill_value # type: ignore[type-var] + ) + + # pandas\core\internals\blocks.py:1378: error: Argument 1 to "shift" + # has incompatible type "Union[ndarray, ExtensionArray]"; expected + # "ndarray" [arg-type] + new_values = shift( + new_values, periods, axis, fill_value # type: ignore[arg-type] + ) return [self.make_block(new_values)] @@ -1475,7 +1508,17 @@ def where_func(cond, values, other): for m in [mask, ~mask]: if m.any(): result = cast(np.ndarray, result) # EABlock overrides where - taken = result.take(m.nonzero()[0], axis=axis) + + # pandas\core\internals\blocks.py:1478: error: Item "integer" + # of "Union[ndarray, integer, bool_]" has no attribute + # "nonzero" [union-attr] + + # pandas\core\internals\blocks.py:1478: error: Item "bool_" of + # "Union[ndarray, integer, bool_]" has no attribute "nonzero" + # [union-attr] + taken = result.take( + m.nonzero()[0], axis=axis # type: ignore[union-attr] + ) r = maybe_downcast_numeric(taken, self.dtype) nb = self.make_block(r.T, placement=self.mgr_locs[m]) result_blocks.append(nb) @@ -1800,7 +1843,10 @@ def to_native_types(self, na_rep="nan", quoting=None, **kwargs): values = self.values mask = isna(values) - values = np.asarray(values.astype(object)) + # pandas\core\internals\blocks.py:1803: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type + # "ExtensionArray") [assignment] + values = np.asarray(values.astype(object)) # type: ignore[assignment] values[mask] = na_rep # TODO(EA2D): reshape not needed with 2D EAs @@ -2627,7 +2673,10 @@ def re_replacer(s): f = np.vectorize(re_replacer, otypes=[self.dtype]) if mask is None: - new_values[:] = f(new_values) + # pandas\core\internals\blocks.py:2630: error: Invalid index type + # "slice" for "ExtensionArray"; expected type "Union[int, ndarray]" + # [index] + new_values[:] = f(new_values) # type: ignore[index] else: new_values[mask] = f(new_values[mask]) diff --git a/pandas/core/internals/construction.py b/pandas/core/internals/construction.py index f28e9695a4263..519664b81f5f8 100644 --- a/pandas/core/internals/construction.py +++ b/pandas/core/internals/construction.py @@ -609,14 +609,17 @@ def _list_of_series_to_arrays( values = np.vstack(aligned_values) if values.dtype == np.object_: - # error: "ExtensionArray" has no attribute "T" - content = list(values.T) # type: ignore[attr-defined] + content = list(values.T) columns = _validate_or_indexify_columns(content, columns) content = _convert_object_array(content, dtype=dtype, coerce_float=coerce_float) return content, columns else: - # error: "ExtensionArray" has no attribute "T" - return values.T, columns # type: ignore[attr-defined] + # pandas\core\internals\construction.py:619: error: Incompatible return + # value type (got "Tuple[ExtensionArray, Union[Index, List[Any]]]", + # expected "Tuple[List[Union[Union[str, int, float, bool], Union[Any, + # Any, Any, Any]]], Union[Index, List[Union[str, int]]]]") + # [return-value] + return values.T, columns # type: ignore[return-value] def _list_of_dict_to_arrays( @@ -739,7 +742,11 @@ def _convert_object_array( def convert(arr): if dtype != np.dtype("O"): arr = 
lib.maybe_convert_objects(arr, try_float=coerce_float) - arr = maybe_cast_to_datetime(arr, dtype) + # pandas\core\internals\construction.py:742: error: Argument 2 to + # "maybe_cast_to_datetime" has incompatible type "Union[dtype, + # ExtensionDtype, None]"; expected "Union[dtype, ExtensionDtype]" + # [arg-type] + arr = maybe_cast_to_datetime(arr, dtype) # type: ignore[arg-type] return arr arrays = [convert(arr) for arr in content] diff --git a/pandas/core/internals/managers.py b/pandas/core/internals/managers.py index 737dc3d512ce2..68c0461ea2640 100644 --- a/pandas/core/internals/managers.py +++ b/pandas/core/internals/managers.py @@ -841,9 +841,13 @@ def as_array( blk = self.blocks[0] if blk.is_extension: # Avoid implicit conversion of extension blocks to object - arr = blk.values.to_numpy(dtype=dtype, na_value=na_value).reshape( - blk.shape - ) + + # pandas\core\internals\managers.py:844: error: Item "ndarray" + # of "Union[ndarray, ExtensionArray]" has no attribute + # "to_numpy" [union-attr] + arr = blk.values.to_numpy( # type: ignore[union-attr] + dtype=dtype, na_value=na_value + ).reshape(blk.shape) else: arr = np.asarray(blk.get_values()) if dtype: @@ -886,7 +890,13 @@ def _interleave(self, dtype=None, na_value=lib.no_default) -> np.ndarray: rl = blk.mgr_locs if blk.is_extension: # Avoid implicit conversion of extension blocks to object - arr = blk.values.to_numpy(dtype=dtype, na_value=na_value) + + # pandas\core\internals\managers.py:889: error: Item "ndarray" + # of "Union[ndarray, ExtensionArray]" has no attribute + # "to_numpy" [union-attr] + arr = blk.values.to_numpy( # type: ignore[union-attr] + dtype=dtype, na_value=na_value + ) else: arr = blk.get_values(dtype) result[rl.indexer] = arr @@ -1425,7 +1435,12 @@ def _make_na_block(self, placement, fill_value=None): block_shape[0] = len(placement) dtype, fill_value = infer_dtype_from_scalar(fill_value) - block_values = np.empty(block_shape, dtype=dtype) + # pandas\core\internals\managers.py:1428: error: Argument "dtype" to + # "empty" has incompatible type "Union[dtype, ExtensionDtype]"; + # expected "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, + # int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, + # Tuple[Any, Any]]" [arg-type] + block_values = np.empty(block_shape, dtype=dtype) # type: ignore[arg-type] block_values.fill(fill_value) return make_block(block_values, placement=placement) @@ -1469,7 +1484,10 @@ def equals(self, other: object) -> bool: return False left = self.blocks[0].values right = other.blocks[0].values - return array_equals(left, right) + # pandas\core\internals\managers.py:1472: error: Value of type + # variable "ArrayLike" of "array_equals" cannot be "Union[ndarray, + # ExtensionArray]" [type-var] + return array_equals(left, right) # type: ignore[type-var] return blockwise_all(self, other, array_equals) diff --git a/pandas/core/internals/ops.py b/pandas/core/internals/ops.py index d7ea5d613d96a..4db1f9e0e5ad4 100644 --- a/pandas/core/internals/ops.py +++ b/pandas/core/internals/ops.py @@ -117,7 +117,15 @@ def _get_same_shape_values( assert rvals.shape[0] == 1, rvals.shape rvals = rvals[0, :] - return lvals, rvals + # pandas\core\internals\ops.py:120: error: Incompatible return value type + # (got "Tuple[Union[ndarray, ExtensionArray], Union[ndarray, + # ExtensionArray]]", expected "Tuple[ExtensionArray, ExtensionArray]") + # [return-value] + + # pandas\core\internals\ops.py:120: error: Incompatible return value type + # (got "Tuple[Union[ndarray, ExtensionArray], 
Union[ndarray, + # ExtensionArray]]", expected "Tuple[ndarray, ndarray]") [return-value] + return lvals, rvals # type: ignore[return-value] def blockwise_all(left: "BlockManager", right: "BlockManager", op) -> bool: diff --git a/pandas/core/missing.py b/pandas/core/missing.py index 52536583b9b0d..a9b6c6b7f1a1c 100644 --- a/pandas/core/missing.py +++ b/pandas/core/missing.py @@ -29,7 +29,12 @@ def mask_missing(arr, values_to_mask): dtype, values_to_mask = infer_dtype_from_array(values_to_mask) try: - values_to_mask = np.array(values_to_mask, dtype=dtype) + # pandas\core\missing.py:32: error: Argument "dtype" to "array" has + # incompatible type "Union[dtype, ExtensionDtype]"; expected + # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, + # Tuple[Any, Any]]" [arg-type] + values_to_mask = np.array(values_to_mask, dtype=dtype) # type: ignore[arg-type] except Exception: values_to_mask = np.array(values_to_mask, dtype=object) @@ -49,7 +54,10 @@ def mask_missing(arr, values_to_mask): # if x is a string and arr is not, then we get False and we must # expand the mask to size arr.shape if is_scalar(mask): - mask = np.zeros(arr.shape, dtype=bool) + # pandas\core\missing.py:52: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type + # "Optional[bool]") [assignment] + mask = np.zeros(arr.shape, dtype=bool) # type: ignore[assignment] else: if is_numeric_v_string_like(arr, x): # GH#29553 prevent numpy deprecation warnings @@ -65,7 +73,10 @@ def mask_missing(arr, values_to_mask): # GH 21977 if mask is None: - mask = np.zeros(arr.shape, dtype=bool) + # pandas\core\missing.py:68: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "Optional[bool]") + # [assignment] + mask = np.zeros(arr.shape, dtype=bool) # type: ignore[assignment] return mask @@ -718,7 +729,10 @@ def inner(invalid, limit): return f_idx else: b_idx_inv = list(inner(invalid[::-1], bw_limit)) - b_idx = set(N - 1 - np.asarray(b_idx_inv)) + # pandas\core\missing.py:721: error: Argument 1 to "set" has + # incompatible type "Union[ndarray, generic]"; expected + # "Iterable[Any]" [arg-type] + b_idx = set(N - 1 - np.asarray(b_idx_inv)) # type: ignore[arg-type] if fw_limit == 0: return b_idx diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 0147e508e53b6..d698542f84188 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -159,7 +159,9 @@ def _has_infs(result) -> bool: elif result.dtype == "f4": return lib.has_infs_f4(result.ravel("K")) try: - return np.isinf(result).any() + # pandas\core\nanops.py:162: error: Incompatible return value type (got + # "bool_", expected "bool") [return-value] + return np.isinf(result).any() # type: ignore[return-value] except (TypeError, NotImplementedError): # if it doesn't support infs, then it can't have infs return False @@ -440,7 +442,9 @@ def nanany( False """ values, _, _, _, _ = _get_values(values, skipna, fill_value=False, mask=mask) - return values.any(axis) + # pandas\core\nanops.py:443: error: Incompatible return value type (got + # "Union[bool_, ndarray]", expected "bool") [return-value] + return values.any(axis) # type: ignore[return-value] def nanall( @@ -477,7 +481,9 @@ def nanall( False """ values, _, _, _, _ = _get_values(values, skipna, fill_value=True, mask=mask) - return values.all(axis) + # pandas\core\nanops.py:480: error: Incompatible return value type (got + # "Union[bool_, ndarray]", expected "bool") 
[return-value] + return values.all(axis) # type: ignore[return-value] @disallow("M8") @@ -522,7 +528,16 @@ def nansum( # "Type[float64]", variable has type "dtype") dtype_sum = np.float64 # type: ignore[assignment] the_sum = values.sum(axis, dtype=dtype_sum) - the_sum = _maybe_null_out(the_sum, axis, mask, values.shape, min_count=min_count) + # pandas\core\nanops.py:525: error: Incompatible types in assignment + # (expression has type "float", variable has type "Union[number, ndarray]") + # [assignment] + + # pandas\core\nanops.py:525: error: Argument 1 to "_maybe_null_out" has + # incompatible type "Union[number, ndarray]"; expected "ndarray" + # [arg-type] + the_sum = _maybe_null_out( # type: ignore[assignment] + the_sum, axis, mask, values.shape, min_count=min_count # type: ignore[arg-type] + ) return _wrap_results(the_sum, dtype) @@ -968,7 +983,9 @@ def nanargmax( array([2, 2, 1, 1], dtype=int64) """ values, mask, _, _, _ = _get_values(values, True, fill_value_typ="-inf", mask=mask) - result = values.argmax(axis) + # pandas\core\nanops.py:971: error: Need type annotation for 'result' + # [var-annotated] + result = values.argmax(axis) # type: ignore[var-annotated] result = _maybe_arg_null_out(result, axis, mask, skipna) return result @@ -1012,7 +1029,9 @@ def nanargmin( array([0, 0, 1, 1], dtype=int64) """ values, mask, _, _, _ = _get_values(values, True, fill_value_typ="+inf", mask=mask) - result = values.argmin(axis) + # pandas\core\nanops.py:1015: error: Need type annotation for 'result' + # [var-annotated] + result = values.argmin(axis) # type: ignore[var-annotated] result = _maybe_arg_null_out(result, axis, mask, skipna) return result @@ -1073,8 +1092,12 @@ def nanskew( adjusted = values - mean if skipna and mask is not None: np.putmask(adjusted, mask, 0) - adjusted2 = adjusted ** 2 - adjusted3 = adjusted2 * adjusted + # pandas\core\nanops.py:1076: error: Unsupported operand types for ** + # ("generic" and "int") [operator] + adjusted2 = adjusted ** 2 # type: ignore[operator] + # pandas\core\nanops.py:1077: error: Unsupported left operand type for * + # ("generic") [operator] + adjusted3 = adjusted2 * adjusted # type: ignore[operator] m2 = adjusted2.sum(axis, dtype=np.float64) m3 = adjusted3.sum(axis, dtype=np.float64) @@ -1184,8 +1207,12 @@ def nankurt( adjusted = values - mean if skipna and mask is not None: np.putmask(adjusted, mask, 0) - adjusted2 = adjusted ** 2 - adjusted4 = adjusted2 ** 2 + # pandas\core\nanops.py:1187: error: Unsupported operand types for ** + # ("generic" and "int") [operator] + adjusted2 = adjusted ** 2 # type: ignore[operator] + # pandas\core\nanops.py:1188: error: Unsupported operand types for ** + # ("generic" and "int") [operator] + adjusted4 = adjusted2 ** 2 # type: ignore[operator] m2 = adjusted2.sum(axis, dtype=np.float64) m4 = adjusted4.sum(axis, dtype=np.float64) @@ -1239,7 +1266,13 @@ def nankurt( # pandas\core\nanops.py:1155: error: Unsupported left operand type for # * ("generic") [operator] - numer = count * (count + 1) * (count - 1) * m4 # type: ignore[operator] + + # pandas\core\nanops.py:1242: error: Argument 1 to "__call__" of + # "_NumberOp" has incompatible type "generic"; expected "Union[int, + # float, complex, number, bool_]" [arg-type] + numer = ( + count * (count + 1) * (count - 1) * m4 # type: ignore[operator,arg-type] + ) # pandas\core\nanops.py:1156: error: Unsupported operand types for * # ("float" and "generic") [operator] @@ -1318,7 +1351,12 @@ def nanprod( values = values.copy() values[mask] = 1 result = values.prod(axis) - 
return _maybe_null_out(result, axis, mask, values.shape, min_count=min_count) + # pandas\core\nanops.py:1321: error: Argument 1 to "_maybe_null_out" has + # incompatible type "Union[number, ndarray]"; expected "ndarray" + # [arg-type] + return _maybe_null_out( + result, axis, mask, values.shape, min_count=min_count # type: ignore[arg-type] + ) def _maybe_arg_null_out( @@ -1386,14 +1424,26 @@ def _get_counts( if mask is not None: count = mask.shape[axis] - mask.sum(axis) else: - count = values_shape[axis] + # pandas\core\nanops.py:1389: error: Incompatible types in assignment + # (expression has type "int", variable has type "Union[ndarray, + # generic]") [assignment] + count = values_shape[axis] # type: ignore[assignment] if is_scalar(count): # error: Incompatible return value type (got "Union[Any, generic]", # expected "Union[int, float, ndarray]") return dtype.type(count) # type: ignore[return-value] try: - return count.astype(dtype) + # pandas\core\nanops.py:1396: error: Incompatible return value type + # (got "Union[ndarray, generic]", expected "Union[int, float, + # ndarray]") [return-value] + + # pandas\core\nanops.py:1396: error: Argument 1 to "astype" of + # "_ArrayOrScalarCommon" has incompatible type "Union[ExtensionDtype, + # dtype]"; expected "Union[dtype, None, type, _SupportsDtype, str, + # Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], List[Any], + # _DtypeDict, Tuple[Any, Any]]" [arg-type] + return count.astype(dtype) # type: ignore[return-value,arg-type] except AttributeError: # error: Argument "dtype" to "array" has incompatible type # "Union[ExtensionDtype, dtype]"; expected "Union[dtype, None, type, @@ -1416,7 +1466,11 @@ def _maybe_null_out( The product of all elements on a given axis. ( NaNs are treated as 1) """ if mask is not None and axis is not None and getattr(result, "ndim", False): - null_mask = (mask.shape[axis] - mask.sum(axis) - min_count) < 0 + # pandas\core\nanops.py:1419: error: Unsupported operand types for - + # ("generic" and "int") [operator] + null_mask = ( + mask.shape[axis] - mask.sum(axis) - min_count # type: ignore[operator] + ) < 0 if np.any(null_mask): if is_numeric_dtype(result): if np.iscomplexobj(result): @@ -1753,7 +1807,15 @@ def na_accum_func(values: ArrayLike, accum_func, skipna: bool) -> ArrayLike: result[mask] = iNaT elif accum_func == np.minimum.accumulate: # Restore NaTs that we masked previously - nz = (~np.asarray(mask)).nonzero()[0] + + # pandas\core\nanops.py:1756: error: Item "integer" of + # "Union[ndarray, integer, bool_]" has no attribute "nonzero" + # [union-attr] + + # pandas\core\nanops.py:1756: error: Item "bool_" of + # "Union[ndarray, integer, bool_]" has no attribute "nonzero" + # [union-attr] + nz = (~np.asarray(mask)).nonzero()[0] # type: ignore[union-attr] if len(nz): # everything up to the first non-na entry stays NaT result[: nz[0]] = iNaT diff --git a/pandas/core/ops/mask_ops.py b/pandas/core/ops/mask_ops.py index 092c7a1260cdc..ff266130ca7b5 100644 --- a/pandas/core/ops/mask_ops.py +++ b/pandas/core/ops/mask_ops.py @@ -46,7 +46,10 @@ def kleene_or( if right is libmissing.NA: result = left.copy() else: - result = left | right + # pandas\core\ops\mask_ops.py:49: error: Incompatible types in + # assignment (expression has type "Union[ndarray, integer, bool_]", + # variable has type "ndarray") [assignment] + result = left | right # type: ignore[assignment] if right_mask is not None: # output is unknown where (False & NA), (NA & False), (NA & NA) @@ -113,7 +116,10 @@ def kleene_xor( else: mask = 
left_mask.copy() else: - mask = left_mask | right_mask + # pandas\core\ops\mask_ops.py:116: error: Incompatible types in + # assignment (expression has type "Union[ndarray, integer, bool_]", + # variable has type "ndarray") [assignment] + mask = left_mask | right_mask # type: ignore[assignment] return result, mask @@ -154,7 +160,10 @@ def kleene_and( if right is libmissing.NA: result = np.zeros_like(left) else: - result = left & right + # pandas\core\ops\mask_ops.py:157: error: Incompatible types in + # assignment (expression has type "Union[ndarray, integer, bool_]", + # variable has type "ndarray") [assignment] + result = left & right # type: ignore[assignment] if right_mask is None: # Scalar `right` diff --git a/pandas/core/ops/missing.py b/pandas/core/ops/missing.py index c33cb32dcec19..b1907bf106164 100644 --- a/pandas/core/ops/missing.py +++ b/pandas/core/ops/missing.py @@ -58,7 +58,10 @@ def fill_zeros(result, x, y): # GH#7325, mask and nans must be broadcastable (also: GH#9308) # Raveling and then reshaping makes np.putmask faster - mask = ((y == 0) & ~np.isnan(result)).ravel() + + # pandas\core\ops\missing.py:61: error: Unsupported operand type + # for ~ ("Union[ndarray, generic]") [operator] + mask = ((y == 0) & ~np.isnan(result)).ravel() # type: ignore[operator] shape = result.shape result = result.astype("float64", copy=False).ravel() diff --git a/pandas/core/series.py b/pandas/core/series.py index 9e1188b3362cd..dbb60190591f1 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -264,7 +264,9 @@ def __init__( copy = False elif isinstance(data, np.ndarray): - if len(data.dtype): + # pandas\core\series.py:267: error: Argument 1 to "len" has + # incompatible type "dtype"; expected "Sized" [arg-type] + if len(data.dtype): # type: ignore[arg-type] # GH#13296 we are dealing with a compound dtype, which # should be treated as 2D raise ValueError( @@ -372,7 +374,11 @@ def _init_dict(self, data, index=None, dtype=None): # Input is now list-like, so rely on "standard" construction: # TODO: passing np.float64 to not break anything yet. See GH-17261 - s = create_series_with_explicit_dtype( + + # pandas\core\series.py:375: error: Value of type variable "ArrayLike" + # of "create_series_with_explicit_dtype" cannot be "Tuple[Any, ...]" + # [type-var] + s = create_series_with_explicit_dtype( # type: ignore[type-var] values, index=keys, dtype=dtype, dtype_if_empty=np.float64 ) @@ -1047,7 +1053,10 @@ def __setitem__(self, key, value): def _set_with_engine(self, key, value): # fails with AttributeError for IntervalIndex loc = self.index._engine.get_loc(key) - validate_numeric_casting(self.dtype, value) + # pandas\core\series.py:1050: error: Argument 1 to + # "validate_numeric_casting" has incompatible type "Union[dtype, + # ExtensionDtype]"; expected "dtype" [arg-type] + validate_numeric_casting(self.dtype, value) # type: ignore[arg-type] self._values[loc] = value def _set_with(self, key, value): @@ -2975,7 +2984,13 @@ def combine(self, other, func, fill_value=None) -> "Series": # TODO: can we do this for only SparseDtype? # The function can return something of any type, so check # if the type is compatible with the calling EA. 
- new_values = maybe_cast_to_extension_array(type(self._values), new_values) + + # pandas\core\series.py:2978: error: Value of type variable + # "ArrayLike" of "maybe_cast_to_extension_array" cannot be + # "List[Any]" [type-var] + new_values = maybe_cast_to_extension_array( + type(self._values), new_values # type: ignore[type-var] + ) return self._constructor(new_values, index=new_index, name=new_name) def combine_first(self, other) -> "Series": diff --git a/pandas/core/strings/accessor.py b/pandas/core/strings/accessor.py index b5ba88641d2fe..d02adce4c79c6 100644 --- a/pandas/core/strings/accessor.py +++ b/pandas/core/strings/accessor.py @@ -613,7 +613,11 @@ def cat(self, others=None, sep=None, na_rep=None, join="left"): result = Series( # type: ignore[assignment] result, dtype=dtype, index=data.index, name=self._orig.name ) - result = result.__finalize__(self._orig, method="str_cat") + # pandas\core\strings\accessor.py:616: error: "ndarray" has no + # attribute "__finalize__" [attr-defined] + result = result.__finalize__( # type: ignore[attr-defined] + self._orig, method="str_cat" + ) return result _shared_docs[ diff --git a/pandas/core/tools/datetimes.py b/pandas/core/tools/datetimes.py index 9422bb80f2e91..558e946251dbc 100644 --- a/pandas/core/tools/datetimes.py +++ b/pandas/core/tools/datetimes.py @@ -389,7 +389,12 @@ def _convert_listlike_datetimes( arg, _ = maybe_convert_dtype(arg, copy=False) except TypeError: if errors == "coerce": - result = np.array(["NaT"], dtype="datetime64[ns]").repeat(len(arg)) + # pandas\core\tools\datetimes.py:392: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type + # "ExtensionArray") [assignment] + result = np.array( # type: ignore[assignment] + ["NaT"], dtype="datetime64[ns]" + ).repeat(len(arg)) return DatetimeIndex(result, name=name) elif errors == "ignore": # error: Incompatible types in assignment (expression has type diff --git a/pandas/core/window/expanding.py b/pandas/core/window/expanding.py index aa1dfe8567c15..d5e62e4789116 100644 --- a/pandas/core/window/expanding.py +++ b/pandas/core/window/expanding.py @@ -88,8 +88,14 @@ def _get_window( axis = self.obj._get_axis(self.axis) length = len(axis) + (other is not None) * len(axis) - other = self.min_periods or -1 - return max(length, other) + # pandas\core\window\expanding.py:91: error: Incompatible types in + # assignment (expression has type "int", variable has type + # "Union[ndarray, FrameOrSeries, None]") [assignment] + other = self.min_periods or -1 # type: ignore[assignment] + # pandas\core\window\expanding.py:92: error: Incompatible return value + # type (got "Union[int, ndarray, FrameOrSeries, None]", expected "int") + # [return-value] + return max(length, other) # type: ignore[return-value] _agg_see_also_doc = dedent( """ diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index fa85b17b8063f..7240528d898b6 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -1140,7 +1140,9 @@ def _center_window(self, result: np.ndarray, offset: int) -> np.ndarray: result = np.copy(result[tuple(lead_indexer)]) return result - def _get_window_weights( + # pandas\core\window\rolling.py:1143: error: Missing return statement + # [return] + def _get_window_weights( # type: ignore[return] self, win_type: Optional[Union[str, Tuple]] = None ) -> np.ndarray: """ diff --git a/pandas/io/formats/format.py b/pandas/io/formats/format.py index ec1ef4a33f263..cb0d8e9554e4f 100644 --- a/pandas/io/formats/format.py +++ 
b/pandas/io/formats/format.py @@ -973,7 +973,16 @@ def _join_multiline(self, *args) -> str: strcols = list(args) if self.index: idx = strcols.pop(0) - lwidth -= np.array([self.adj.len(x) for x in idx]).max() + adjoin_width + # pandas\io\formats\format.py:976: error: Argument 1 to "__call__" + # of "_NumberOp" has incompatible type "None"; expected "Union[int, + # float, complex, number, bool_]" [arg-type] + + # pandas\io\formats\format.py:976: error: Incompatible types in + # assignment (expression has type "number", variable has type + # "Optional[int]") [assignment] + lwidth -= ( # type: ignore[arg-type,assignment] + np.array([self.adj.len(x) for x in idx]).max() + adjoin_width + ) col_widths = [ np.array([self.adj.len(x) for x in col]).max() if len(col) > 0 else 0 @@ -981,7 +990,9 @@ def _join_multiline(self, *args) -> str: ] assert lwidth is not None - col_bins = _binify(col_widths, lwidth) + # pandas\io\formats\format.py:984: error: Argument 1 to "_binify" has + # incompatible type "List[object]"; expected "List[int]" [arg-type] + col_bins = _binify(col_widths, lwidth) # type: ignore[arg-type] nbins = len(col_bins) if self.is_truncated_vertically: diff --git a/pandas/io/pytables.py b/pandas/io/pytables.py index d5aca8a4e0438..d930f2e19e2e9 100644 --- a/pandas/io/pytables.py +++ b/pandas/io/pytables.py @@ -4506,8 +4506,7 @@ def read( index_ = cols cols_ = Index(index, name=getattr(index, "name", None)) else: - # error: "ExtensionArray" has no attribute "T" - values = cvalues.T # type: ignore[attr-defined] + values = cvalues.T index_ = Index(index, name=getattr(index, "name", None)) cols_ = cols diff --git a/pandas/io/sas/sas_xport.py b/pandas/io/sas/sas_xport.py index 2a48abe9fbd63..05c9ccf049fa4 100644 --- a/pandas/io/sas/sas_xport.py +++ b/pandas/io/sas/sas_xport.py @@ -215,7 +215,11 @@ def _parse_float_vec(vec): # order 3 bits of the first half since we're only shifting by # 1, 2, or 3. 
ieee1 >>= shift - ieee2 = (xport2 >> shift) | ((xport1 & 0x00000007) << (29 + (3 - shift))) + # pandas\io\sas\sas_xport.py:218: error: Unsupported operand types for + + # ("int" and "generic") [operator] + ieee2 = (xport2 >> shift) | ( + (xport1 & 0x00000007) << (29 + (3 - shift)) # type: ignore[operator] + ) # clear the 1 bit to the left of the binary point ieee1 &= 0xFFEFFFFF diff --git a/pandas/io/stata.py b/pandas/io/stata.py index 19e6e6e4f2562..03940ec4b3346 100644 --- a/pandas/io/stata.py +++ b/pandas/io/stata.py @@ -878,11 +878,14 @@ def __init__(self): self.DTYPE_MAP = dict( list(zip(range(1, 245), ["a" + str(i) for i in range(1, 245)])) + [ - (251, np.int8), - (252, np.int16), - (253, np.int32), - (254, np.float32), - (255, np.float64), + (251, np.int8), # type:ignore[list-item] + (252, np.int16), # type:ignore[list-item] + (253, np.int32), # type:ignore[list-item] + (254, np.float32), # type:ignore[list-item] + # pandas\io\stata.py:885: error: List item 4 has incompatible + # type "Tuple[int, Type[float64]]"; expected "Tuple[int, str]" + # [list-item] + (255, np.float64), # type:ignore[list-item] ] ) self.DTYPE_MAP_XML = dict( @@ -1223,7 +1226,10 @@ def g(typ: int) -> Union[str, np.dtype]: if typ <= 2045: return str(typ) try: - return self.DTYPE_MAP_XML[typ] + # pandas\io\stata.py:1226: error: Incompatible return value + # type (got "Type[number]", expected "Union[str, dtype]") + # [return-value] + return self.DTYPE_MAP_XML[typ] # type: ignore[return-value] except KeyError as err: raise ValueError(f"cannot convert stata dtype [{typ}]") from err @@ -1356,7 +1362,12 @@ def _read_old_header(self, first_char: bytes) -> None: invalid_types = ",".join(str(x) for x in typlist) raise ValueError(f"cannot convert stata types [{invalid_types}]") from err try: - self.dtyplist = [self.DTYPE_MAP[typ] for typ in typlist] + # pandas\io\stata.py:1359: error: List comprehension has + # incompatible type List[str]; expected List[Union[int, dtype]] + # [misc] + self.dtyplist = [ + self.DTYPE_MAP[typ] for typ in typlist # type: ignore[misc] + ] except ValueError as err: invalid_dtypes = ",".join(str(x) for x in typlist) raise ValueError(f"cannot convert stata dtypes [{invalid_dtypes}]") from err diff --git a/pandas/plotting/_matplotlib/tools.py b/pandas/plotting/_matplotlib/tools.py index 97df7900d25a3..bec1f48f5e64a 100644 --- a/pandas/plotting/_matplotlib/tools.py +++ b/pandas/plotting/_matplotlib/tools.py @@ -403,9 +403,7 @@ def handle_shared_axes( def flatten_axes(axes: Union["Axes", Sequence["Axes"]]) -> np.ndarray: if not is_list_like(axes): - # error: Incompatible return value type (got "ndarray", expected - # "Sequence[Any]") - return np.array([axes]) # type: ignore[return-value] + return np.array([axes]) elif isinstance(axes, (np.ndarray, ABCIndexClass)): return np.asarray(axes).ravel() return np.array(axes) From 75a2d3501da494f9231021e64cc5e68d452c36b7 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 24 Oct 2020 14:35:33 +0100 Subject: [PATCH 24/86] 1.20.0.dev0+7b0a764 --- pandas/core/arrays/numpy_.py | 8 +++----- pandas/core/generic.py | 6 +++++- pandas/core/indexes/base.py | 4 +++- pandas/core/internals/construction.py | 6 +++++- pandas/tests/arrays/sparse/test_combine_concat.py | 8 +++++++- 5 files changed, 23 insertions(+), 9 deletions(-) diff --git a/pandas/core/arrays/numpy_.py b/pandas/core/arrays/numpy_.py index aff686e81a627..d42945b0bf941 100644 --- a/pandas/core/arrays/numpy_.py +++ b/pandas/core/arrays/numpy_.py @@ -40,11 +40,9 @@ class PandasDtype(ExtensionDtype): 
_metadata = ("_dtype",) def __init__(self, dtype: object): - # error: Argument 1 to "dtype" has incompatible type "object"; expected - # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int], - # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, - # Tuple[Any, Any]]" - self._dtype = np.dtype(dtype) # type: ignore[arg-type] + # pandas\core\arrays\numpy_.py:47: error: No overload variant of "dtype" + # matches argument type "object" [call-overload] + self._dtype = np.dtype(dtype) # type: ignore[call-overload] def __repr__(self) -> str: return f"PandasDtype({repr(self.name)})" diff --git a/pandas/core/generic.py b/pandas/core/generic.py index e232aef0d9f29..21405092b1547 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -7070,7 +7070,11 @@ def interpolate( f"`limit_direction` must be 'backward' for method `{method}`" ) - if obj.ndim == 2 and np.all(obj.dtypes == np.dtype(object)): + # pandas\core\generic.py:7073: error: Value of type variable + # "_DTypeScalar" of "dtype" cannot be "object" [type-var] + if obj.ndim == 2 and np.all( + obj.dtypes == np.dtype(object) # type: ignore[type-var] + ): raise TypeError( "Cannot interpolate with all object-dtype columns " "in the DataFrame. Try setting at least one " diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index 80a0fc9b2960f..fdebd994728e8 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -118,7 +118,9 @@ str_t = str -_o_dtype = np.dtype(object) +# pandas\core\indexes\base.py:121: error: Value of type variable "_DTypeScalar" +# of "dtype" cannot be "object" [type-var] +_o_dtype = np.dtype(object) # type: ignore[type-var] _Identity = NewType("_Identity", object) diff --git a/pandas/core/internals/construction.py b/pandas/core/internals/construction.py index 519664b81f5f8..c5ac91c295f82 100644 --- a/pandas/core/internals/construction.py +++ b/pandas/core/internals/construction.py @@ -269,7 +269,11 @@ def init_dict(data: Dict, index, columns, dtype: Optional[DtypeObj] = None): and np.issubdtype(dtype, np.flexible) # type: ignore[arg-type] ): # GH#1783 - nan_dtype = np.dtype(object) + + # pandas\core\internals\construction.py:272: error: Value of + # type variable "_DTypeScalar" of "dtype" cannot be "object" + # [type-var] + nan_dtype = np.dtype(object) # type: ignore[type-var] else: # error: Incompatible types in assignment (expression has type # "Union[dtype, ExtensionDtype]", variable has type "dtype") diff --git a/pandas/tests/arrays/sparse/test_combine_concat.py b/pandas/tests/arrays/sparse/test_combine_concat.py index 0f09af269148b..899f7d434b9ed 100644 --- a/pandas/tests/arrays/sparse/test_combine_concat.py +++ b/pandas/tests/arrays/sparse/test_combine_concat.py @@ -46,7 +46,13 @@ def test_uses_first_kind(self, kind): (pd.Series([3, 4, 5], dtype="category"), np.dtype("int64")), (pd.Series([1.5, 2.5, 3.5], dtype="category"), np.dtype("float64")), # categorical with incompatible categories -> object dtype - (pd.Series(["a", "b", "c"], dtype="category"), np.dtype(object)), + ( + pd.Series(["a", "b", "c"], dtype="category"), + # pandas\tests\arrays\sparse\test_combine_concat.py:49: error: Value + # of type variable "_DTypeScalar" of "dtype" cannot be "object" + # [type-var] + np.dtype(object), # type: ignore[type-var] + ), ], ) def test_concat_with_non_sparse(other, expected_dtype): From 87b4e2a9d83ceb7115a5736592a97494aa172e78 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 24 Oct 2020 15:52:44 +0100 Subject: [PATCH 25/86] update ignores --- 
pandas/core/algorithms.py | 7 +--- pandas/core/arrays/interval.py | 1 - pandas/core/arrays/timedeltas.py | 5 ++- pandas/core/dtypes/cast.py | 29 +++++------------ pandas/core/indexes/extension.py | 4 ++- pandas/core/nanops.py | 27 ++++++++++++--- pandas/core/window/common.py | 8 +++-- pandas/core/window/rolling.py | 6 +++- pandas/io/formats/string.py | 15 +++++++-- pandas/io/stata.py | 56 +++++++++++--------------------- setup.cfg | 6 ++++ 11 files changed, 87 insertions(+), 77 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index 4b7be5a5a130c..0ce7e46270559 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -2137,12 +2137,7 @@ def safe_sort( if not isinstance(values, (np.ndarray, ABCExtensionArray)): # don't convert to string types dtype, _ = infer_dtype_from_array(values) - # pandas\core\algorithms.py:2121: error: Argument "dtype" to "asarray" - # has incompatible type "Union[dtype, ExtensionDtype]"; expected - # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int], - # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, - # Tuple[Any, Any]]" [arg-type] - values = np.asarray(values, dtype=dtype) # type: ignore[arg-type] + values = np.asarray(values, dtype=dtype) def sort_mixed(values): # order ints before strings, safe in py3 diff --git a/pandas/core/arrays/interval.py b/pandas/core/arrays/interval.py index 8414ede5c1238..85827bc052519 100644 --- a/pandas/core/arrays/interval.py +++ b/pandas/core/arrays/interval.py @@ -1426,7 +1426,6 @@ def _get_combined_data( # pandas\core\arrays\interval.py:1422: error: Value of type variable # "ArrayLike" of "maybe_upcast_datetimelike_array" cannot be "Union[Index, # ExtensionArray]" [type-var] - # pandas\core\arrays\interval.py:1422: error: Value of type variable # "ArrayLike" of "maybe_upcast_datetimelike_array" cannot be "Union[Index, # ndarray]" [type-var] diff --git a/pandas/core/arrays/timedeltas.py b/pandas/core/arrays/timedeltas.py index fb0e64cc48ac3..844036c2e3f74 100644 --- a/pandas/core/arrays/timedeltas.py +++ b/pandas/core/arrays/timedeltas.py @@ -393,7 +393,10 @@ def sum( ) if is_scalar(result): return Timedelta(result) - return self._from_backing_data(result) + # pandas\core\arrays\timedeltas.py:396: error: Argument 1 to + # "_from_backing_data" of "DatetimeLikeArrayMixin" has incompatible + # type "float"; expected "ndarray" [arg-type] + return self._from_backing_data(result) # type: ignore[arg-type] def std( self, diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index a0814e29e66f9..bfc94be4e8a13 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -176,7 +176,10 @@ def maybe_downcast_to_dtype(result, dtype: Union[str, np.dtype]): with suppress(TypeError): # e.g. 
TypeError: int() argument must be a string, a # bytes-like object or a number, not 'Period - return PeriodArray(result, freq=dtype.freq) + + # pandas\core\dtypes\cast.py:179: error: "dtype[Any]" has no + # attribute "freq" [attr-defined] + return PeriodArray(result, freq=dtype.freq) # type: ignore[attr-defined] converted = maybe_downcast_numeric(result, dtype, do_round) if converted is not result: @@ -184,14 +187,7 @@ def maybe_downcast_to_dtype(result, dtype: Union[str, np.dtype]): # a datetimelike # GH12821, iNaT is cast to float - - # pandas\core\dtypes\cast.py:178: error: Item "type" of - # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute "kind" - # [union-attr] - if dtype.kind in ["M", "m"] and result.dtype.kind in [ # type: ignore[union-attr] - "i", - "f", - ]: + if dtype.kind in ["M", "m"] and result.dtype.kind in ["i", "f"]: if hasattr(dtype, "tz"): # not a numpy dtype @@ -211,18 +207,9 @@ def maybe_downcast_to_dtype(result, dtype: Union[str, np.dtype]): from pandas import to_datetime result = to_datetime(result).tz_localize("utc") - # pandas\core\dtypes\cast.py:186: error: Item "ExtensionDtype" - # of "Union[ExtensionDtype, dtype, Type[object]]" has no - # attribute "tz" [union-attr] - - # pandas\core\dtypes\cast.py:186: error: Item "dtype" of - # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute - # "tz" [union-attr] - - # pandas\core\dtypes\cast.py:186: error: Item "type" of - # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute - # "tz" [union-attr] - result = result.tz_convert(dtype.tz) # type: ignore[union-attr] + # pandas\core\dtypes\cast.py:225: error: "dtype[Any]" has no + # attribute "tz" [attr-defined] + result = result.tz_convert(dtype.tz) # type: ignore[attr-defined] else: result = result.astype(dtype) diff --git a/pandas/core/indexes/extension.py b/pandas/core/indexes/extension.py index 4da1a43468b57..4fed26339ae7f 100644 --- a/pandas/core/indexes/extension.py +++ b/pandas/core/indexes/extension.py @@ -280,4 +280,6 @@ def astype(self, dtype, copy=True): @cache_readonly def _isnan(self) -> np.ndarray: - return self._data.isna() + # pandas\core\indexes\extension.py:283: error: Incompatible return + # value type (got "ExtensionArray", expected "ndarray") [return-value] + return self._data.isna() # type: ignore[return-value] diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 2e904d5d9cce5..cec0e1f4f69e5 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -549,8 +549,16 @@ def nansum( the_sum = _wrap_results(the_sum, dtype) if datetimelike and not skipna: - the_sum = _mask_datetimelike_result(the_sum, axis, mask, orig_values) - return the_sum + # pandas\core\nanops.py:552: error: Argument 1 to + # "_mask_datetimelike_result" has incompatible type "Union[number, + # ndarray]"; expected "Union[ndarray, datetime64, timedelta64]" + # [arg-type] + the_sum = _mask_datetimelike_result( + the_sum, axis, mask, orig_values # type: ignore[arg-type] + ) + # pandas\core\nanops.py:553: error: Incompatible return value type (got + # "Union[number, ndarray]", expected "float") [return-value] + return the_sum # type: ignore[return-value] def _mask_datetimelike_result( @@ -616,12 +624,21 @@ def nanmean( datetimelike = False if dtype.kind in ["m", "M"]: datetimelike = True - dtype_sum = np.float64 + # pandas\core\nanops.py:619: error: Incompatible types in assignment + # (expression has type "Type[float64]", variable has type "dtype[Any]") + # [assignment] + dtype_sum = np.float64 # type: ignore[assignment] elif 
is_integer_dtype(dtype): - dtype_sum = np.float64 + # pandas\core\nanops.py:621: error: Incompatible types in assignment + # (expression has type "Type[float64]", variable has type "dtype[Any]") + # [assignment] + dtype_sum = np.float64 # type: ignore[assignment] elif is_float_dtype(dtype): dtype_sum = dtype - dtype_count = dtype + # pandas\core\nanops.py:624: error: Incompatible types in assignment + # (expression has type "dtype[Any]", variable has type "Type[float64]") + # [assignment] + dtype_count = dtype # type: ignore[assignment] count = _get_counts(values.shape, mask, axis, dtype=dtype_count) the_sum = _ensure_numeric(values.sum(axis, dtype=dtype_sum)) diff --git a/pandas/core/window/common.py b/pandas/core/window/common.py index 938f1846230cb..82cfca01fced2 100644 --- a/pandas/core/window/common.py +++ b/pandas/core/window/common.py @@ -165,10 +165,14 @@ def zsqrt(x): if isinstance(x, ABCDataFrame): if mask._values.any(): - result[mask] = 0 + # pandas\core\window\common.py:168: error: Unsupported target for + # indexed assignment ("Union[ndarray, generic]") [index] + result[mask] = 0 # type: ignore[index] else: if mask.any(): - result[mask] = 0 + # pandas\core\window\common.py:171: error: Unsupported target for + # indexed assignment ("Union[ndarray, generic]") [index] + result[mask] = 0 # type: ignore[index] return result diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index 64d17eeeb6b79..005d8183ac99e 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -1162,7 +1162,11 @@ def homogeneous_func(values: np.ndarray): def calc(x): additional_nans = np.array([np.nan] * offset) x = np.concatenate((x, additional_nans)) - return func(x, window, self.min_periods or len(window)) + # pandas\core\window\rolling.py:1165: error: Argument 2 has + # incompatible type "ndarray"; expected "int" [arg-type] + return func( + x, window, self.min_periods or len(window) # type: ignore[arg-type] + ) with np.errstate(all="ignore"): if values.ndim > 1: diff --git a/pandas/io/formats/string.py b/pandas/io/formats/string.py index 4ebb78f29c739..715cbccda3ac8 100644 --- a/pandas/io/formats/string.py +++ b/pandas/io/formats/string.py @@ -113,7 +113,16 @@ def _join_multiline(self, strcols_input: Iterable[List[str]]) -> str: if self.fmt.index: idx = strcols.pop(0) - lwidth -= np.array([self.adj.len(x) for x in idx]).max() + adjoin_width + # pandas\io\formats\string.py:116: error: Argument 1 to "__call__" + # of "_NumberOp" has incompatible type "None"; expected "Union[int, + # float, complex, number, bool_]" [arg-type] + + # pandas\io\formats\string.py:116: error: Incompatible types in + # assignment (expression has type "number", variable has type + # "Optional[int]") [assignment] + lwidth -= ( # type: ignore[assignment,arg-type] + np.array([self.adj.len(x) for x in idx]).max() + adjoin_width + ) col_widths = [ np.array([self.adj.len(x) for x in col]).max() if len(col) > 0 else 0 @@ -121,7 +130,9 @@ def _join_multiline(self, strcols_input: Iterable[List[str]]) -> str: ] assert lwidth is not None - col_bins = _binify(col_widths, lwidth) + # pandas\io\formats\string.py:124: error: Argument 1 to "_binify" has + # incompatible type "List[object]"; expected "List[int]" [arg-type] + col_bins = _binify(col_widths, lwidth) # type: ignore[arg-type] nbins = len(col_bins) if self.fmt.is_truncated_vertically: diff --git a/pandas/io/stata.py b/pandas/io/stata.py index 9c7335a85fed9..14f47b6fa31ca 100644 --- a/pandas/io/stata.py +++ b/pandas/io/stata.py @@ -563,9 
+563,7 @@ def _cast_to_stata_types(data: DataFrame) -> DataFrame: dtype = c_data[1] else: dtype = c_data[2] - # error: Non-overlapping equality check (left operand type: - # "Type[signedinteger]", right operand type: "Type[float64]") - if c_data[2] == np.int64: # type: ignore[comparison-overlap] + if c_data[2] == np.int64: # Warn if necessary if data[col].max() >= 2 ** 53: ws = precision_loss_doc.format("uint64", "float64") @@ -658,13 +656,8 @@ def __init__(self, catarray: Series, encoding: str = "latin-1"): ) # Ensure int32 - - # error: Incompatible types in assignment (expression has type - # "ndarray", variable has type "List[int]") - self.off = np.array(offsets, dtype=np.int32) # type: ignore[assignment] - # error: Incompatible types in assignment (expression has type - # "ndarray", variable has type "List[int]") [assignment] - self.val = np.array(values, dtype=np.int32) # type: ignore[assignment] + self.off = np.array(offsets, dtype=np.int32) + self.val = np.array(values, dtype=np.int32) # Total length self.len = 4 + 4 + 4 * self.n + 4 * self.n + self.text_len @@ -878,14 +871,11 @@ def __init__(self): self.DTYPE_MAP = dict( list(zip(range(1, 245), [np.dtype("a" + str(i)) for i in range(1, 245)])) + [ - (251, np.dtype(np.int8)), # type:ignore[list-item] - (252, np.dtype(np.int16)), # type:ignore[list-item] - (253, np.dtype(np.int32)), # type:ignore[list-item] - (254, np.dtype(np.float32)), # type:ignore[list-item] - # pandas\io\stata.py:885: error: List item 4 has incompatible - # type "Tuple[int, Type[float64]]"; expected "Tuple[int, str]" - # [list-item] - (255, np.dtype(np.float64)), # type:ignore[list-item] + (251, np.dtype(np.int8)), + (252, np.dtype(np.int16)), + (253, np.dtype(np.int32)), + (254, np.dtype(np.float32)), + (255, np.dtype(np.float64)), ] ) self.DTYPE_MAP_XML = dict( @@ -1236,10 +1226,7 @@ def g(typ: int) -> Union[str, np.dtype]: dtyplist = [g(x) for x in raw_typlist] - # error: Incompatible return value type (got "Tuple[List[Union[int, - # str]], List[Union[str, dtype]]]", expected "Tuple[List[Union[int, - # str]], List[Union[int, dtype]]]") - return typlist, dtyplist # type: ignore[return-value] + return typlist, dtyplist def _get_varlist(self) -> List[str]: # 33 in order formats, 129 in formats 118 and 119 @@ -1363,12 +1350,7 @@ def _read_old_header(self, first_char: bytes) -> None: invalid_types = ",".join(str(x) for x in typlist) raise ValueError(f"cannot convert stata types [{invalid_types}]") from err try: - # pandas\io\stata.py:1359: error: List comprehension has - # incompatible type List[str]; expected List[Union[int, dtype]] - # [misc] - self.dtyplist = [ - self.DTYPE_MAP[typ] for typ in typlist # type: ignore[misc] - ] + self.dtyplist = [self.DTYPE_MAP[typ] for typ in typlist] except ValueError as err: invalid_dtypes = ",".join(str(x) for x in typlist) raise ValueError(f"cannot convert stata dtypes [{invalid_dtypes}]") from err @@ -1429,12 +1411,9 @@ def _setup_dtype(self) -> np.dtype: dtypes.append(("s" + str(i), self.byteorder + self.NUMPY_TYPE_MAP[typ])) else: dtypes.append(("s" + str(i), "S" + str(typ))) - # error: Incompatible types in assignment (expression has type "dtype", - # variable has type "None") - self._dtype = np.dtype(dtypes) # type: ignore[assignment] + self._dtype = np.dtype(dtypes) - # error: Incompatible return value type (got "None", expected "dtype") - return self._dtype # type: ignore[return-value] + return self._dtype def _calcsize(self, fmt: Union[int, str]) -> int: if isinstance(fmt, int): @@ -1674,7 +1653,12 @@ def read( if 
self.dtyplist[i] is not None: col = data.columns[i] dtype = data[col].dtype - if dtype != np.dtype(object) and dtype != self.dtyplist[i]: + # pandas\io\stata.py:1677: error: Value of type variable + # "_DTypeScalar" of "dtype" cannot be "object" [type-var] + if ( + dtype != np.dtype(object) # type: ignore[type-var] + and dtype != self.dtyplist[i] + ): requires_type_conversion = True data_formatted.append( (col, Series(data[col], ix, self.dtyplist[i])) @@ -1845,9 +1829,7 @@ def _do_convert_categoricals( warnings.warn( categorical_conversion_warning, CategoricalConversionWarning ) - # error: Incompatible types in assignment (expression has - # type "None", variable has type "ndarray") - initial_categories = None # type: ignore[assignment] + initial_categories = None cat_data = Categorical( column, categories=initial_categories, ordered=order_categoricals ) diff --git a/setup.cfg b/setup.cfg index 04982564618c9..96fa02049cf5b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -340,3 +340,9 @@ ignore_errors=True [mypy-pandas.tests.window.test_dtypes] ignore_errors=True +[mypy-pandas.tests.indexing.multiindex.test_indexing_slow] +ignore_errors=True + +[mypy-pandas.tests.series.test_dt_accessor] +ignore_errors=True + From e7d5c08c787f20b841fa7d49c9b9cb48f3ee01a6 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 31 Oct 2020 17:51:01 +0000 Subject: [PATCH 26/86] numpy-1.20.0.dev0+43683b3 --- pandas/core/algorithms.py | 10 +++- pandas/core/arrays/_ranges.py | 8 +++- pandas/core/arrays/categorical.py | 9 +++- pandas/core/arrays/integer.py | 5 +- pandas/core/arrays/period.py | 5 +- pandas/core/reshape/tile.py | 9 +++- pandas/core/util/hashing.py | 42 ++-------------- pandas/io/formats/format.py | 9 ++-- pandas/plotting/_matplotlib/converter.py | 61 ++++++++++-------------- 9 files changed, 72 insertions(+), 86 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index 0ce7e46270559..67d1ca6061155 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -1261,7 +1261,10 @@ def compute(self, method: str) -> Series: # fast method arr, pandas_dtype = _ensure_data(dropped.values) if method == "nlargest": - arr = -arr + # pandas\core\algorithms.py:1264: error: Incompatible types in + # assignment (expression has type "Union[ndarray, generic]", + # variable has type "ndarray") [assignment] + arr = -arr # type: ignore[assignment] if is_integer_dtype(pandas_dtype): # GH 21426: ensure reverse ordering at boundaries arr -= 1 @@ -1272,7 +1275,10 @@ def compute(self, method: str) -> Series: # pandas\core\algorithms.py:1269: error: Incompatible types in # assignment (expression has type "Union[ndarray, generic]", # variable has type "ndarray") [assignment] - arr = 1 - (-arr) # type: ignore[assignment] + + # pandas\core\algorithms.py:1275: error: Unsupported operand + # types for - ("int" and "generic") [operator] + arr = 1 - (-arr) # type: ignore[assignment,operator] if self.keep == "last": arr = arr[::-1] diff --git a/pandas/core/arrays/_ranges.py b/pandas/core/arrays/_ranges.py index bf62cafa39467..25503d83c6152 100644 --- a/pandas/core/arrays/_ranges.py +++ b/pandas/core/arrays/_ranges.py @@ -105,7 +105,13 @@ def _generate_range_overflow_safe( # if periods * strides cannot be multiplied within the *uint64* bounds, # we cannot salvage the operation by recursing, so raise try: - addend = np.uint64(periods) * np.uint64(np.abs(stride)) + # pandas\core\arrays\_ranges.py:108: error: Argument 1 to + # "unsignedinteger" has incompatible type "Union[ndarray, + # generic]"; 
expected "Union[SupportsInt, Union[str, bytes], + # SupportsIndex]" [arg-type] + addend = np.uint64(periods) * np.uint64( + np.abs(stride) # type: ignore[arg-type] + ) except FloatingPointError as err: raise OutOfBoundsDatetime(msg) from err diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py index 87871c6e13d50..8c2fc578b2d8c 100644 --- a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -367,8 +367,13 @@ def __init__( if null_mask.any(): # Reinsert -1 placeholders for previously removed missing values full_codes = -np.ones(null_mask.shape, dtype=codes.dtype) - full_codes[~null_mask] = codes - codes = full_codes + # pandas\core\arrays\categorical.py:370: error: Unsupported target + # for indexed assignment ("Union[ndarray, generic]") [index] + full_codes[~null_mask] = codes # type: ignore[index] + # pandas\core\arrays\categorical.py:371: error: Incompatible types + # in assignment (expression has type "Union[ndarray, generic]", + # variable has type "ndarray") [assignment] + codes = full_codes # type: ignore[assignment] self._dtype = self._dtype.update_dtype(dtype) self._codes = coerce_indexer_dtype(codes, dtype.categories) diff --git a/pandas/core/arrays/integer.py b/pandas/core/arrays/integer.py index 10a4480765c19..1856df04c1be9 100644 --- a/pandas/core/arrays/integer.py +++ b/pandas/core/arrays/integer.py @@ -359,7 +359,10 @@ def __init__(self, values: np.ndarray, mask: np.ndarray, copy: bool = False): super().__init__(values, mask, copy=copy) def __neg__(self): - return type(self)(-self._data, self._mask) + # pandas\core\arrays\integer.py:362: error: Argument 1 to + # "IntegerArray" has incompatible type "Union[ndarray, generic]"; + # expected "ndarray" [arg-type] + return type(self)(-self._data, self._mask) # type: ignore[arg-type] def __pos__(self): return self diff --git a/pandas/core/arrays/period.py b/pandas/core/arrays/period.py index 2b5f351ec8dba..451d6e14e85f0 100644 --- a/pandas/core/arrays/period.py +++ b/pandas/core/arrays/period.py @@ -676,7 +676,10 @@ def _addsub_int_array( """ assert op in [operator.add, operator.sub] if op is operator.sub: - other = -other + # pandas\core\arrays\period.py:679: error: Incompatible types in + # assignment (expression has type "Union[ndarray, generic]", + # variable has type "ndarray") [assignment] + other = -other # type: ignore[assignment] res_values = algos.checked_add_with_arr(self.asi8, other, arr_mask=self._isnan) res_values = res_values.view("i8") res_values[self._isnan] = iNaT diff --git a/pandas/core/reshape/tile.py b/pandas/core/reshape/tile.py index b62ae2279b1a3..2ca9ca431aa75 100644 --- a/pandas/core/reshape/tile.py +++ b/pandas/core/reshape/tile.py @@ -607,7 +607,14 @@ def _round_frac(x, precision: int): # error: 'numpy.generic' object is not iterable frac, whole = np.modf(x) # type: ignore[misc] if whole == 0: - digits = -int(np.floor(np.log10(abs(frac)))) - 1 + precision + # pandas\core\reshape\tile.py:610: error: Argument 1 to "int" has + # incompatible type "Union[ndarray, generic]"; expected "Union[str, + # bytes, SupportsInt, _SupportsIndex]" [arg-type] + digits = ( + -int(np.floor(np.log10(abs(frac)))) # type: ignore[arg-type] + - 1 + + precision + ) else: digits = precision return np.around(x, digits) diff --git a/pandas/core/util/hashing.py b/pandas/core/util/hashing.py index a57bf07e0f13f..df082c7285ae8 100644 --- a/pandas/core/util/hashing.py +++ b/pandas/core/util/hashing.py @@ -45,46 +45,10 @@ def combine_hash_arrays(arrays, num_items: int): for i, a in 
enumerate(arrays): inverse_i = num_items - i out ^= a - # pandas\core\util\hashing.py:48: error: No overload variant of - # "__call__" of "_UnsignedIntOp" matches argument type "generic" - # [call-overload] - - # pandas\core\util\hashing.py:48: note: Possible overload variants: - - # pandas\core\util\hashing.py:48: note: def __call__(self, - # Union[bool, unsignedinteger]) -> unsignedinteger - - # pandas\core\util\hashing.py:48: note: def __call__(self, - # Union[int, signedinteger]) -> Union[signedinteger, float64] - - # pandas\core\util\hashing.py:48: note: <2 more similar overloads - # not shown, out of 4 total overloads> - - # pandas\core\util\hashing.py:48: note: Left operand is of type - # "Union[ndarray, generic]" - out *= mult # type: ignore[call-overload] - # pandas\core\util\hashing.py:49: error: Incompatible types in - # assignment (expression has type "unsignedinteger", variable has type - # "uint64") [assignment] - mult += np.uint64(82520 + inverse_i + inverse_i) # type: ignore[assignment] + out *= mult + mult += np.uint64(82520 + inverse_i + inverse_i) assert i + 1 == num_items, "Fed in wrong num_items" - # pandas\core\util\hashing.py:51: error: No overload variant of "__call__" - # of "_UnsignedIntOp" matches argument type "generic" [call-overload] - - # pandas\core\util\hashing.py:51: note: Possible overload variants: - - # pandas\core\util\hashing.py:51: note: def __call__(self, Union[bool, - # unsignedinteger]) -> unsignedinteger - - # pandas\core\util\hashing.py:51: note: def __call__(self, Union[int, - # signedinteger]) -> Union[signedinteger, float64] - - # pandas\core\util\hashing.py:51: note: <2 more similar overloads not - # shown, out of 4 total overloads> - - # pandas\core\util\hashing.py:51: note: Left operand is of type - # "Union[ndarray, generic]" - out += np.uint64(97531) # type: ignore[call-overload] + out += np.uint64(97531) return out diff --git a/pandas/io/formats/format.py b/pandas/io/formats/format.py index 18051c0d14e13..1054052735567 100644 --- a/pandas/io/formats/format.py +++ b/pandas/io/formats/format.py @@ -1600,12 +1600,13 @@ def format_percentiles( to_end = 100 - unique_pcts[-1] if unique_pcts[-1] < 100 else None # Least precision that keeps percentiles unique after rounding - prec = -np.floor( + + # pandas\io\formats\format.py:1603: error: Unsupported operand type for + # unary - ("Union[ndarray, generic]") [operator] + prec = -np.floor( # type: ignore[operator] np.log10(np.min(np.ediff1d(unique_pcts, to_begin=to_begin, to_end=to_end))) ).astype(int) - # error: Incompatible types in assignment (expression has type "Union[int, - # ndarray, generic]", variable has type "Union[ndarray, generic]") - prec = max(1, prec) # type: ignore[assignment] + prec = max(1, prec) out = np.empty_like(percentiles, dtype=object) out[int_idx] = percentiles[int_idx].astype(int).astype(str) out[~int_idx] = percentiles[~int_idx].round(prec).astype(str) diff --git a/pandas/plotting/_matplotlib/converter.py b/pandas/plotting/_matplotlib/converter.py index 27c7b931b7136..1a235c18bc419 100644 --- a/pandas/plotting/_matplotlib/converter.py +++ b/pandas/plotting/_matplotlib/converter.py @@ -168,17 +168,13 @@ def __call__(self, x, pos=0) -> str: Parameters ---------- - x : float - The time of day specified as seconds since 00:00 (midnight), - with up to microsecond precision. - pos - Unused + x : float The time of day specified as seconds since 00:00 (midnight), + with up to microsecond precision. pos Unused Returns ------- - str - A string in HH:MM:SS.mmmuuu format. 
Microseconds, - milliseconds and seconds are only displayed if non-zero. + str A string in HH:MM:SS.mmmuuu format. Microseconds, milliseconds and + seconds are only displayed if non-zero. """ fmt = "%H:%M:%S.%f" s = int(x) @@ -222,8 +218,8 @@ def _convert_1d(values, units, axis): elif isinstance(values, Index): return values.map(lambda x: get_datevalue(x, axis.freq)) elif lib.infer_dtype(values, skipna=False) == "period": - # https://github.com/pandas-dev/pandas/issues/24304 - # convert ndarray[period] -> PeriodIndex + # https://github.com/pandas-dev/pandas/issues/24304 convert + # ndarray[period] -> PeriodIndex return PeriodIndex(values, freq=axis.freq).asi8 elif isinstance(values, (list, tuple, np.ndarray, Index)): return [get_datevalue(x, axis.freq) for x in values] @@ -273,8 +269,8 @@ def try_parse(values): return try_parse(values) elif isinstance(values, (list, tuple, np.ndarray, Index, Series)): if isinstance(values, Series): - # https://github.com/matplotlib/matplotlib/issues/11391 - # Series was skipped. Convert to DatetimeIndex to get asi8 + # https://github.com/matplotlib/matplotlib/issues/11391 Series + # was skipped. Convert to DatetimeIndex to get asi8 values = Index(values) if isinstance(values, Index): values = values.values @@ -298,8 +294,8 @@ def axisinfo(unit: Optional[tzinfo], axis) -> units.AxisInfo: """ Return the :class:`~matplotlib.units.AxisInfo` for *unit*. - *unit* is a tzinfo instance or None. - The *axis* argument is required but not used. + *unit* is a tzinfo instance or None. The *axis* argument is required + but not used. """ tz = unit @@ -479,10 +475,8 @@ def period_break(dates: PeriodIndex, period: str) -> np.ndarray: Parameters ---------- - dates : PeriodIndex - Array of intervals to monitor. - period : string - Name of the period to monitor. + dates : PeriodIndex Array of intervals to monitor. period : string Name of + the period to monitor. """ current = getattr(dates, period) previous = getattr(dates - 1 * dates.freq, period) @@ -891,16 +885,11 @@ class TimeSeries_DateLocator(Locator): Parameters ---------- - freq : {var} - Valid frequency specifier. - minor_locator : {False, True}, optional - Whether the locator is for minor ticks (True) or not. - dynamic_mode : {True, False}, optional - Whether the locator should work in dynamic mode. - base : {int}, optional - quarter : {int}, optional - month : {int}, optional - day : {int}, optional + freq : {var} Valid frequency specifier. minor_locator : {False, True}, + optional Whether the locator is for minor ticks (True) or not. + dynamic_mode : {True, False}, optional Whether the locator should work + in dynamic mode. base : {int}, optional quarter : {int}, optional month + : {int}, optional day : {int}, optional """ def __init__( @@ -982,12 +971,9 @@ class TimeSeries_DateFormatter(Formatter): Parameters ---------- - freq : {int, string} - Valid frequency specifier. - minor_locator : bool, default False - Whether the current formatter should apply to minor ticks (True) or - major ticks (False). - dynamic_mode : bool, default True + freq : {int, string} Valid frequency specifier. minor_locator : bool, + default False Whether the current formatter should apply to minor ticks + (True) or major ticks (False). dynamic_mode : bool, default True Whether the formatter works in dynamic mode or not. 
""" @@ -1072,7 +1058,12 @@ def format_timedelta_ticks(x, pos, n_decimals: int) -> str: def __call__(self, x, pos=0) -> str: (vmin, vmax) = tuple(self.axis.get_view_interval()) - n_decimals = int(np.ceil(np.log10(100 * 1e9 / (vmax - vmin)))) + # pandas\plotting\_matplotlib\converter.py:1075: error: Argument 1 to + # "int" has incompatible type "Union[ndarray, generic]"; expected + # "Union[str, bytes, SupportsInt, _SupportsIndex]" [arg-type] + n_decimals = int( + np.ceil(np.log10(100 * 1e9 / (vmax - vmin))) # type: ignore[arg-type] + ) if n_decimals > 9: n_decimals = 9 return self.format_timedelta_ticks(x, pos, n_decimals) From 22020e0f3f57ca221540dde9946bab66f6664c7d Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 31 Oct 2020 20:26:56 +0000 Subject: [PATCH 27/86] update ignores --- pandas/core/arrays/datetimelike.py | 5 ++++- pandas/core/indexes/interval.py | 14 ++++++++++++-- pandas/core/nanops.py | 21 +++++++++++++-------- pandas/core/window/rolling.py | 6 +----- pandas/io/pytables.py | 5 ++++- setup.cfg | 4 +++- 6 files changed, 37 insertions(+), 18 deletions(-) diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py index bfdfb9866c31e..dfb5f12aa7020 100644 --- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -1353,7 +1353,10 @@ def mean(self, skipna=True, axis: Optional[int] = 0): ) if axis is None or self.ndim == 1: return self._box_func(result) - return self._from_backing_data(result) + # pandas\core\arrays\datetimelike.py:1356: error: Argument 1 to + # "_from_backing_data" of "DatetimeLikeArrayMixin" has incompatible + # type "float"; expected "ndarray" [arg-type] + return self._from_backing_data(result) # type: ignore[arg-type] def median(self, axis: Optional[int] = None, skipna: bool = True, *args, **kwargs): nv.validate_median(args, kwargs) diff --git a/pandas/core/indexes/interval.py b/pandas/core/indexes/interval.py index 2a268e0003490..028a38fb96f00 100644 --- a/pandas/core/indexes/interval.py +++ b/pandas/core/indexes/interval.py @@ -1290,8 +1290,18 @@ def interval_range( else: # delegate to the appropriate range function if isinstance(endpoint, Timestamp): - breaks = date_range(start=start, end=end, periods=periods, freq=freq) + # pandas\core\indexes\interval.py:1293: error: Incompatible types + # in assignment (expression has type "DatetimeIndex", variable has + # type "ndarray") [assignment] + breaks = date_range( # type: ignore[assignment] + start=start, end=end, periods=periods, freq=freq + ) else: - breaks = timedelta_range(start=start, end=end, periods=periods, freq=freq) + # pandas\core\indexes\interval.py:1295: error: Incompatible types + # in assignment (expression has type "TimedeltaIndex", variable has + # type "ndarray") [assignment] + breaks = timedelta_range( # type: ignore[assignment] + start=start, end=end, periods=periods, freq=freq + ) return IntervalIndex.from_breaks(breaks, name=name, closed=closed) diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 69ecfb1ead23f..ede92bd4c345f 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -659,7 +659,12 @@ def nanmean( the_mean = _wrap_results(the_mean, dtype) if datetimelike and not skipna: - the_mean = _mask_datetimelike_result(the_mean, axis, mask, orig_values) + # pandas\core\nanops.py:662: error: Argument 3 to + # "_mask_datetimelike_result" has incompatible type + # "Optional[ndarray]"; expected "ndarray" [arg-type] + the_mean = _mask_datetimelike_result( + the_mean, axis, mask, orig_values # type: 
ignore[arg-type] + ) return the_mean @@ -1009,15 +1014,15 @@ def reduction( result = getattr(values, meth)(axis) result = _wrap_results(result, dtype, fill_value) - # error: Incompatible return value type (got "float", expected - # "Union[ExtensionDtype, str, dtype, Type[str], Type[float], Type[int], - # Type[complex], Type[bool], Type[object]]") - result = _maybe_null_out( # type: ignore[return-value] - result, axis, mask, values.shape - ) + result = _maybe_null_out(result, axis, mask, values.shape) if datetimelike and not skipna: - result = _mask_datetimelike_result(result, axis, mask, orig_values) + # pandas\core\nanops.py:1020: error: Argument 3 to + # "_mask_datetimelike_result" has incompatible type + # "Optional[ndarray]"; expected "ndarray" [arg-type] + result = _mask_datetimelike_result( + result, axis, mask, orig_values # type: ignore[arg-type] + ) return result diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index 3d547e24087c8..23cd5133fd6df 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -1073,11 +1073,7 @@ def homogeneous_func(values: np.ndarray): def calc(x): additional_nans = np.array([np.nan] * offset) x = np.concatenate((x, additional_nans)) - # pandas\core\window\rolling.py:1165: error: Argument 2 has - # incompatible type "ndarray"; expected "int" [arg-type] - return func( - x, window, self.min_periods or len(window) # type: ignore[arg-type] - ) + return func(x, window, self.min_periods or len(window)) with np.errstate(all="ignore"): if values.ndim > 1: diff --git a/pandas/io/pytables.py b/pandas/io/pytables.py index 496753460b8df..6e94c80ce1526 100644 --- a/pandas/io/pytables.py +++ b/pandas/io/pytables.py @@ -2977,7 +2977,10 @@ def write_array(self, key: str, obj: FrameOrSeries, items: Optional[Index] = Non # that gets passed is DatetimeArray, and we never have # both self._filters and EA - value = extract_array(obj, extract_numpy=True) + # pandas\io\pytables.py:2980: error: Value of type variable + # "AnyArrayLike" of "extract_array" cannot be "FrameOrSeries" + # [type-var] + value = extract_array(obj, extract_numpy=True) # type: ignore[type-var] if key in self.group: self._handle.remove_node(self.group, key) diff --git a/setup.cfg b/setup.cfg index 502d251a029d3..a8c55e744f2fd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -329,6 +329,8 @@ ignore_errors=True [mypy-pandas.tests.indexing.multiindex.test_indexing_slow] ignore_errors=True -[mypy-pandas.tests.series.test_dt_accessor] +[mypy-pandas.tests.series.accessors.test_dt_accessor] ignore_errors=True +[mypy-pandas.tests.frame.indexing.test_indexing] +ignore_errors=True From f766989e7b78417918c539f98cd0dd3349ec6c2c Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sun, 1 Nov 2020 10:22:33 +0000 Subject: [PATCH 28/86] update ignores --- pandas/core/groupby/groupby.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pandas/core/groupby/groupby.py b/pandas/core/groupby/groupby.py index 323303e5a446c..11d82bb78cd11 100644 --- a/pandas/core/groupby/groupby.py +++ b/pandas/core/groupby/groupby.py @@ -2193,7 +2193,10 @@ def pre_processor(vals: np.ndarray) -> Tuple[np.ndarray, Optional[Type]]: inference = "datetime64[ns]" # type: ignore[assignment] vals = np.asarray(vals).astype(float) elif is_timedelta64_dtype(vals.dtype): - inference = "timedelta64[ns]" + # pandas\core\groupby\groupby.py:2196: error: Incompatible + # types in assignment (expression has type "str", variable has + # type "Optional[Type[signedinteger[Any]]]") [assignment] + 
inference = "timedelta64[ns]" # type: ignore[assignment] vals = np.asarray(vals).astype(float) return vals, inference From 28e74468ba40f579c0c2abf6d43f688a3eb198a8 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sun, 8 Nov 2020 12:55:27 +0000 Subject: [PATCH 29/86] numpy-1.20.0.dev0+5f071c6 --- pandas/_testing.py | 4 +--- pandas/core/common.py | 6 ++---- pandas/core/internals/construction.py | 5 ++++- pandas/core/internals/managers.py | 6 +++++- pandas/core/missing.py | 6 +++++- 5 files changed, 17 insertions(+), 10 deletions(-) diff --git a/pandas/_testing.py b/pandas/_testing.py index fd72fe4fe7420..28a7a5af21f5e 100644 --- a/pandas/_testing.py +++ b/pandas/_testing.py @@ -16,9 +16,7 @@ import numpy as np -# pandas\_testing.py:17: error: Module 'numpy.random' has no attribute 'rand' -# pandas\_testing.py:17: error: Module 'numpy.random' has no attribute 'randn' -from numpy.random import rand, randn # type: ignore[attr-defined] +from numpy.random import rand, randn from pandas._config.localization import ( # noqa:F401 can_set_locale, diff --git a/pandas/core/common.py b/pandas/core/common.py index 029b90c436bfc..ac73141025147 100644 --- a/pandas/core/common.py +++ b/pandas/core/common.py @@ -426,10 +426,8 @@ def random_state(state=None): or is_array_like(state) or (not np_version_under1p18 and isinstance(state, np.random.BitGenerator)) ): - # error: Module has no attribute "RandomState"; maybe "__RandomState_ctor"? - return np.random.RandomState(state) # type: ignore[attr-defined] - # error: Module has no attribute "RandomState"; maybe "__RandomState_ctor"? - elif isinstance(state, np.random.RandomState): # type: ignore[attr-defined] + return np.random.RandomState(state) + elif isinstance(state, np.random.RandomState): return state elif state is None: return np.random diff --git a/pandas/core/internals/construction.py b/pandas/core/internals/construction.py index c5ac91c295f82..3420ef919c106 100644 --- a/pandas/core/internals/construction.py +++ b/pandas/core/internals/construction.py @@ -610,7 +610,10 @@ def _list_of_series_to_arrays( values = extract_array(s, extract_numpy=True) aligned_values.append(algorithms.take_1d(values, indexer)) - values = np.vstack(aligned_values) + # pandas\core\internals\construction.py:613: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type + # "ExtensionArray") [assignment] + values = np.vstack(aligned_values) # type: ignore[assignment] if values.dtype == np.object_: content = list(values.T) diff --git a/pandas/core/internals/managers.py b/pandas/core/internals/managers.py index e29bbddce36c7..2f320247aa5dc 100644 --- a/pandas/core/internals/managers.py +++ b/pandas/core/internals/managers.py @@ -1928,7 +1928,11 @@ def _merge_blocks( # TODO: optimization potential in case all mgrs contain slices and # combination of those slices is a slice, too. 
new_mgr_locs = np.concatenate([b.mgr_locs.as_array for b in blocks]) - new_values = np.vstack([b.values for b in blocks]) + # pandas\core\internals\managers.py:1931: error: List comprehension has + # incompatible type List[Union[ndarray, ExtensionArray]]; expected + # List[Union[complex, generic, Sequence[Union[int, float, complex, str, + # bytes, generic]], Sequence[Sequence[Any]], _SupportsArray]] [misc] + new_values = np.vstack([b.values for b in blocks]) # type: ignore[misc] argsort = np.argsort(new_mgr_locs) new_values = new_values[argsort] diff --git a/pandas/core/missing.py b/pandas/core/missing.py index a9b6c6b7f1a1c..920e7d35078ff 100644 --- a/pandas/core/missing.py +++ b/pandas/core/missing.py @@ -753,4 +753,8 @@ def _rolling_window(a: np.ndarray, window: int): # https://stackoverflow.com/a/6811241 shape = a.shape[:-1] + (a.shape[-1] - window + 1, window) strides = a.strides + (a.strides[-1],) - return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides) + # pandas\core\missing.py:756: error: Module has no attribute + # "stride_tricks" [attr-defined] + return np.lib.stride_tricks.as_strided( # type: ignore[attr-defined] + a, shape=shape, strides=strides + ) From d597f892099147d8fd3b297385d16cedc66995f1 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sun, 8 Nov 2020 13:12:21 +0000 Subject: [PATCH 30/86] update setup.cfg --- setup.cfg | 91 +------------------------------------------------------ 1 file changed, 1 insertion(+), 90 deletions(-) diff --git a/setup.cfg b/setup.cfg index a8c55e744f2fd..43616d693691e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -209,19 +209,13 @@ check_untyped_defs=False [mypy-pandas.plotting._misc] check_untyped_defs=False -[mypy-pandas.conftest] -ignore_errors=True - [mypy-pandas.tests.arithmetic.conftest] ignore_errors=True -[mypy-pandas.tests.computation.test_eval] -ignore_errors=True - [mypy-pandas.tests.dtypes.test_common] ignore_errors=True -[mypy-pandas.tests.dtypes.test_generic] +[mypy-pandas.tests.frame.indexing.test_indexing] ignore_errors=True [mypy-pandas.tests.frame.methods.test_to_records] @@ -233,104 +227,21 @@ ignore_errors=True [mypy-pandas.tests.groupby.transform.test_transform] ignore_errors=True -[mypy-pandas.tests.indexes.datetimes.test_datetime] -ignore_errors=True - [mypy-pandas.tests.indexes.interval.test_interval] ignore_errors=True -[mypy-pandas.tests.indexes.timedeltas.test_timedelta] -ignore_errors=True - -[mypy-pandas.tests.indexing.multiindex.test_insert] -ignore_errors=True - -[mypy-pandas.tests.indexing.multiindex.test_setitem] -ignore_errors=True - -[mypy-pandas.tests.indexing.multiindex.test_sorted] -ignore_errors=True - [mypy-pandas.tests.indexing.test_categorical] ignore_errors=True -[mypy-pandas.tests.indexing.test_indexing] -ignore_errors=True - [mypy-pandas.tests.io.excel.test_writers] ignore_errors=True -[mypy-pandas.tests.io.test_clipboard] -ignore_errors=True - -[mypy-pandas.tests.io.test_html] -ignore_errors=True - -[mypy-pandas.tests.plotting.test_frame] -ignore_errors=True - -[mypy-pandas.tests.plotting.test_hist_method] -ignore_errors=True - -[mypy-pandas.tests.plotting.test_misc] -ignore_errors=True - -[mypy-pandas.tests.plotting.test_series] -ignore_errors=True - [mypy-pandas.tests.reductions.test_reductions] ignore_errors=True -[mypy-pandas.tests.resample.test_resample_api] -ignore_errors=True - -[mypy-pandas.tests.resample.test_time_grouper] -ignore_errors=True - -[mypy-pandas.tests.reshape.merge.test_join] -ignore_errors=True - -[mypy-pandas.tests.reshape.merge.test_multi] -ignore_errors=True 
-
-[mypy-pandas.tests.reshape.test_concat]
-ignore_errors=True
-
 [mypy-pandas.tests.series.apply.test_series_apply]
 ignore_errors=True

-[mypy-pandas.tests.series.indexing.test_get]
-ignore_errors=True
-
-[mypy-pandas.tests.series.test_datetime_values]
-ignore_errors=True
-
-[mypy-pandas.tests.series.test_duplicates]
-ignore_errors=True
-
-[mypy-pandas.tests.test_algos]
-ignore_errors=True
-
-[mypy-pandas.tests.test_expressions]
-ignore_errors=True
-
-[mypy-pandas.tests.test_multilevel]
-ignore_errors=True
-
-[mypy-pandas.tests.test_strings]
-ignore_errors=True
-
-[mypy-pandas.tests.window.conftest]
-ignore_errors=True
-
 [mypy-pandas.tests.window.test_dtypes]
 ignore_errors=True

-[mypy-pandas.tests.indexing.multiindex.test_indexing_slow]
-ignore_errors=True
-
-[mypy-pandas.tests.series.accessors.test_dt_accessor]
-ignore_errors=True
-
-[mypy-pandas.tests.frame.indexing.test_indexing]
-ignore_errors=True

From ae8a49119d1b804d1dd6d64beab905ebddfec6dc Mon Sep 17 00:00:00 2001
From: Simon Hawkins
Date: Sun, 8 Nov 2020 13:54:42 +0000
Subject: [PATCH 31/86] update ignores

---
 pandas/core/algorithms.py         |  4 +++-
 pandas/core/arrays/categorical.py |  4 +++-
 pandas/core/arrays/period.py      | 23 +++++++++++++++++++----
 setup.cfg                         |  2 +-
 4 files changed, 26 insertions(+), 7 deletions(-)

diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py
index fb7204946cfd0..99076ee3f6059 100644
--- a/pandas/core/algorithms.py
+++ b/pandas/core/algorithms.py
@@ -2214,7 +2214,9 @@ def _sort_mixed(values):
     return np.concatenate([nums, np.asarray(strs, dtype=object)])


-def _sort_tuples(values: np.ndarray[tuple]):
+# pandas\core\algorithms.py:2217: error: "ndarray" expects no type arguments,
+# but 1 given [type-arg]
+def _sort_tuples(values: np.ndarray[tuple]):  # type: ignore[type-arg]
     """
     Convert array of tuples (1d) to array or array (2d).
We need to keep the columns separately as they contain different types and diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py index 954f5074870b9..b4e2cb81a815c 100644 --- a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -356,7 +356,9 @@ def __init__( dtype = CategoricalDtype(categories, dtype.ordered) elif is_categorical_dtype(values.dtype): - old_codes = extract_array(values).codes + # pandas\core\arrays\categorical.py:359: error: "ExtensionArray" + # has no attribute "codes" [attr-defined] + old_codes = extract_array(values).codes # type: ignore[attr-defined] codes = recode_for_categories( old_codes, values.dtype.categories, dtype.categories ) diff --git a/pandas/core/arrays/period.py b/pandas/core/arrays/period.py index 65de95ca87695..960568237625d 100644 --- a/pandas/core/arrays/period.py +++ b/pandas/core/arrays/period.py @@ -261,14 +261,24 @@ def _generate_range(cls, start, end, periods, freq, fields): # ----------------------------------------------------------------- # DatetimeLike Interface - def _unbox_scalar( + # pandas\core\arrays\period.py:264: error: Return type "int" of + # "_unbox_scalar" incompatible with return type + # "Union[signedinteger[_64Bit], datetime64, timedelta64]" in supertype + # "DatetimeLikeArrayMixin" [override] + def _unbox_scalar( # type: ignore[override] self, value: Union[Period, NaTType], setitem: bool = False ) -> int: if value is NaT: - return np.int64(value.value) + # pandas\core\arrays\period.py:268: error: Incompatible return + # value type (got "signedinteger[_64Bit]", expected "int") + # [return-value] + return np.int64(value.value) # type: ignore[return-value] elif isinstance(value, self._scalar_type): self._check_compatible_with(value, setitem=setitem) - return np.int64(value.ordinal) + # pandas\core\arrays\period.py:271: error: Incompatible return + # value type (got "signedinteger[_64Bit]", expected "int") + # [return-value] + return np.int64(value.ordinal) # type: ignore[return-value] else: raise ValueError(f"'value' should be a Period. 
Got '{value}' instead.") @@ -1095,9 +1105,14 @@ def _make_field_arrays(*fields): elif length is None: length = len(x) + # pandas\core\arrays\period.py:1101: error: Argument 2 to "repeat" has + # incompatible type "Optional[int]"; expected "Union[Union[int, + # integer[Any]], Union[bool, bool_], ndarray, Sequence[Union[int, + # integer[Any]]], Sequence[Union[bool, bool_]], Sequence[Sequence[Any]]]" + # [arg-type] return [ np.asarray(x) if isinstance(x, (np.ndarray, list, ABCSeries)) - else np.repeat(x, length) + else np.repeat(x, length) # type: ignore[arg-type] for x in fields ] diff --git a/setup.cfg b/setup.cfg index 43616d693691e..065282c5a6c37 100644 --- a/setup.cfg +++ b/setup.cfg @@ -215,7 +215,7 @@ ignore_errors=True [mypy-pandas.tests.dtypes.test_common] ignore_errors=True -[mypy-pandas.tests.frame.indexing.test_indexing] +[mypy-pandas.tests.frame.indexing.test_setitem] ignore_errors=True [mypy-pandas.tests.frame.methods.test_to_records] From f4d4c1339003e00e8a3dfb5bb90d479c1e7a3de1 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sun, 8 Nov 2020 13:58:18 +0000 Subject: [PATCH 32/86] fix mangled docstrings --- pandas/plotting/_matplotlib/converter.py | 54 +++++++++++++++--------- 1 file changed, 34 insertions(+), 20 deletions(-) diff --git a/pandas/plotting/_matplotlib/converter.py b/pandas/plotting/_matplotlib/converter.py index 1a235c18bc419..b9bbde9d85c0d 100644 --- a/pandas/plotting/_matplotlib/converter.py +++ b/pandas/plotting/_matplotlib/converter.py @@ -168,13 +168,17 @@ def __call__(self, x, pos=0) -> str: Parameters ---------- - x : float The time of day specified as seconds since 00:00 (midnight), - with up to microsecond precision. pos Unused + x : float + The time of day specified as seconds since 00:00 (midnight), + with up to microsecond precision. + pos + Unused Returns ------- - str A string in HH:MM:SS.mmmuuu format. Microseconds, milliseconds and - seconds are only displayed if non-zero. + str + A string in HH:MM:SS.mmmuuu format. Microseconds, + milliseconds and seconds are only displayed if non-zero. """ fmt = "%H:%M:%S.%f" s = int(x) @@ -218,8 +222,8 @@ def _convert_1d(values, units, axis): elif isinstance(values, Index): return values.map(lambda x: get_datevalue(x, axis.freq)) elif lib.infer_dtype(values, skipna=False) == "period": - # https://github.com/pandas-dev/pandas/issues/24304 convert - # ndarray[period] -> PeriodIndex + # https://github.com/pandas-dev/pandas/issues/24304 + # convert ndarray[period] -> PeriodIndex return PeriodIndex(values, freq=axis.freq).asi8 elif isinstance(values, (list, tuple, np.ndarray, Index)): return [get_datevalue(x, axis.freq) for x in values] @@ -269,8 +273,8 @@ def try_parse(values): return try_parse(values) elif isinstance(values, (list, tuple, np.ndarray, Index, Series)): if isinstance(values, Series): - # https://github.com/matplotlib/matplotlib/issues/11391 Series - # was skipped. Convert to DatetimeIndex to get asi8 + # https://github.com/matplotlib/matplotlib/issues/11391 + # Series was skipped. Convert to DatetimeIndex to get asi8 values = Index(values) if isinstance(values, Index): values = values.values @@ -294,8 +298,8 @@ def axisinfo(unit: Optional[tzinfo], axis) -> units.AxisInfo: """ Return the :class:`~matplotlib.units.AxisInfo` for *unit*. - *unit* is a tzinfo instance or None. The *axis* argument is required - but not used. + *unit* is a tzinfo instance or None. + The *axis* argument is required but not used. 
""" tz = unit @@ -475,8 +479,10 @@ def period_break(dates: PeriodIndex, period: str) -> np.ndarray: Parameters ---------- - dates : PeriodIndex Array of intervals to monitor. period : string Name of - the period to monitor. + dates : PeriodIndex + Array of intervals to monitor. + period : string + Name of the period to monitor. """ current = getattr(dates, period) previous = getattr(dates - 1 * dates.freq, period) @@ -885,11 +891,16 @@ class TimeSeries_DateLocator(Locator): Parameters ---------- - freq : {var} Valid frequency specifier. minor_locator : {False, True}, - optional Whether the locator is for minor ticks (True) or not. - dynamic_mode : {True, False}, optional Whether the locator should work - in dynamic mode. base : {int}, optional quarter : {int}, optional month - : {int}, optional day : {int}, optional + freq : {var} + Valid frequency specifier. + minor_locator : {False, True}, optional + Whether the locator is for minor ticks (True) or not. + dynamic_mode : {True, False}, optional + Whether the locator should work in dynamic mode. + base : {int}, optional + quarter : {int}, optional + month : {int}, optional + day : {int}, optional """ def __init__( @@ -971,9 +982,12 @@ class TimeSeries_DateFormatter(Formatter): Parameters ---------- - freq : {int, string} Valid frequency specifier. minor_locator : bool, - default False Whether the current formatter should apply to minor ticks - (True) or major ticks (False). dynamic_mode : bool, default True + freq : {int, string} + Valid frequency specifier. + minor_locator : bool, default False + Whether the current formatter should apply to minor ticks (True) or + major ticks (False). + dynamic_mode : bool, default True Whether the formatter works in dynamic mode or not. """ From d2514c6008986c6982d1def2936cc06a42ede442 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Mon, 9 Nov 2020 14:57:08 +0000 Subject: [PATCH 33/86] add more ignores for previously unchecked defs --- pandas/core/array_algos/replace.py | 5 ++++- pandas/core/generic.py | 14 ++++++++++++-- pandas/core/indexes/category.py | 8 +++++++- pandas/core/indexes/datetimes.py | 5 ++++- pandas/core/indexes/multi.py | 10 ++++++++-- pandas/core/internals/blocks.py | 5 ++++- pandas/core/nanops.py | 15 ++------------- pandas/core/reshape/merge.py | 14 ++++++++++++-- pandas/io/parsers.py | 11 +++++++++-- 9 files changed, 62 insertions(+), 25 deletions(-) diff --git a/pandas/core/array_algos/replace.py b/pandas/core/array_algos/replace.py index bb930561c2aed..5a23b11e03df6 100644 --- a/pandas/core/array_algos/replace.py +++ b/pandas/core/array_algos/replace.py @@ -130,6 +130,9 @@ def re_replacer(s): f = np.vectorize(re_replacer, otypes=[values.dtype]) if mask is None: - values[:] = f(values) + # pandas\core\array_algos\replace.py:133: error: Invalid index type + # "slice" for "ExtensionArray"; expected type "Union[int, ndarray]" + # [index] + values[:] = f(values) # type: ignore[index] else: values[mask] = f(values[mask]) diff --git a/pandas/core/generic.py b/pandas/core/generic.py index 7beba311a484f..703faf6f0ad6d 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -8960,7 +8960,12 @@ def _where( # we are the same shape, so create an actual object for alignment else: - other = self._constructor(other, **self._construct_axes_dict()) + # pandas\core\generic.py:8963: error: Argument 1 to "NDFrame" + # has incompatible type "ndarray"; expected "BlockManager" + # [arg-type] + other = self._constructor( + other, **self._construct_axes_dict() # type: ignore[arg-type] + ) if 
axis is None: axis = 0 @@ -10695,7 +10700,12 @@ def mad(self, axis=None, skipna=None, level=None): demeaned = data - data.mean(axis=0) else: demeaned = data.sub(data.mean(axis=1), axis=0) - return np.abs(demeaned).mean(axis=axis, skipna=skipna) + # pandas\core\generic.py:10698: error: No overload variant of "mean" of + # "_ArrayOrScalarCommon" matches argument types "Any", "Any" + # [call-overload] + return np.abs(demeaned).mean( + axis=axis, skipna=skipna # type: ignore[call-overload] + ) @classmethod def _add_numeric_operations(cls): diff --git a/pandas/core/indexes/category.py b/pandas/core/indexes/category.py index 859c26a40e50d..0ef0f4e20edbd 100644 --- a/pandas/core/indexes/category.py +++ b/pandas/core/indexes/category.py @@ -173,12 +173,18 @@ def _can_hold_strings(self): def _engine_type(self): # self.codes can have dtype int8, int16, int32 or int64, so we need # to return the corresponding engine type (libindex.Int8Engine, etc.). + + # pandas\core\indexes\category.py:181: error: Invalid index type + # "Type[generic]" for "Dict[Type[signedinteger[Any]], Any]"; expected + # type "Type[signedinteger[Any]]" [index] return { np.int8: libindex.Int8Engine, np.int16: libindex.Int16Engine, np.int32: libindex.Int32Engine, np.int64: libindex.Int64Engine, - }[self.codes.dtype.type] + }[ + self.codes.dtype.type # type: ignore[index] + ] _attributes = ["name"] diff --git a/pandas/core/indexes/datetimes.py b/pandas/core/indexes/datetimes.py index c84039357bec9..a473ec1a1a4e9 100644 --- a/pandas/core/indexes/datetimes.py +++ b/pandas/core/indexes/datetimes.py @@ -528,7 +528,10 @@ def to_series(self, keep_tz=lib.no_default, index=None, name=None): # preserve the tz & copy values = self.copy(deep=True) else: - values = self._values.view("M8[ns]").copy() + # pandas\core\indexes\datetimes.py:531: error: Incompatible types + # in assignment (expression has type "Union[ExtensionArray, + # ndarray]", variable has type "DatetimeIndex") [assignment] + values = self._values.view("M8[ns]").copy() # type: ignore[assignment] return Series(values, index=index, name=name) diff --git a/pandas/core/indexes/multi.py b/pandas/core/indexes/multi.py index 182195e8fe6cb..a55e686470021 100644 --- a/pandas/core/indexes/multi.py +++ b/pandas/core/indexes/multi.py @@ -2995,10 +2995,16 @@ def convert_indexer(start, stop, step, indexer=indexer, codes=level_codes): indexer = codes.take(ensure_platform_int(indexer)) result = Series(Index(indexer).isin(r).nonzero()[0]) m = result.map(mapper) - m = np.asarray(m) + # pandas\core\indexes\multi.py:2998: error: Incompatible types + # in assignment (expression has type "ndarray", variable has + # type "Series") [assignment] + m = np.asarray(m) # type: ignore[assignment] else: - m = np.zeros(len(codes), dtype=bool) + # pandas\core\indexes\multi.py:3001: error: Incompatible types + # in assignment (expression has type "ndarray", variable has + # type "Series") [assignment] + m = np.zeros(len(codes), dtype=bool) # type: ignore[assignment] m[np.in1d(codes, r, assume_unique=Index(codes).is_unique)] = True return m diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 987e24eb4e62a..9a784a5857f55 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -852,7 +852,10 @@ def _replace_regex( rx = re.compile(to_replace) new_values = self.values if inplace else self.values.copy() - replace_regex(new_values, rx, value, mask) + # pandas\core\internals\blocks.py:855: error: Value of type variable + # "ArrayLike" of "replace_regex" 
cannot be "Union[ndarray, + # ExtensionArray]" [type-var] + replace_regex(new_values, rx, value, mask) # type: ignore[type-var] block = self.make_block(new_values) if convert: diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 071c252f6f991..39ed3d42a6756 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -354,13 +354,7 @@ def _wrap_results(result, dtype: np.dtype, fill_value=None): result = np.int64(result).view("datetime64[ns]") else: # If we have float dtype, taking a view will give the wrong result - - # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has - # incompatible type "Union[dtype, ExtensionDtype]"; expected - # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int], - # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, - # Tuple[Any, Any]]" - result = result.astype(dtype) # type: ignore[arg-type] + result = result.astype(dtype) elif is_timedelta64_dtype(dtype): if not isinstance(result, np.ndarray): if result == fill_value: @@ -372,12 +366,7 @@ def _wrap_results(result, dtype: np.dtype, fill_value=None): result = Timedelta(result, unit="ns") else: - # error: Argument 1 to "view" of "_ArrayOrScalarCommon" has - # incompatible type "Union[dtype, ExtensionDtype]"; expected - # "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, int], - # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, - # Tuple[Any, Any]]" - result = result.astype("m8[ns]").view(dtype) # type: ignore[arg-type] + result = result.astype("m8[ns]").view(dtype) return result diff --git a/pandas/core/reshape/merge.py b/pandas/core/reshape/merge.py index a5b5877be287a..e8a874cf92460 100644 --- a/pandas/core/reshape/merge.py +++ b/pandas/core/reshape/merge.py @@ -1113,7 +1113,12 @@ def _maybe_coerce_merge_keys(self): # check whether ints and floats elif is_integer_dtype(rk.dtype) and is_float_dtype(lk.dtype): - if not (lk == lk.astype(rk.dtype))[~np.isnan(lk)].all(): + # pandas\core\reshape\merge.py:1116: error: Unsupported + # operand type for ~ ("Union[ndarray, generic]") + # [operator] + if not (lk == lk.astype(rk.dtype))[ + ~np.isnan(lk) # type: ignore[operator] + ].all(): warnings.warn( "You are merging on int and float " "columns where the float values " @@ -1123,7 +1128,12 @@ def _maybe_coerce_merge_keys(self): continue elif is_float_dtype(rk.dtype) and is_integer_dtype(lk.dtype): - if not (rk == rk.astype(lk.dtype))[~np.isnan(rk)].all(): + # pandas\core\reshape\merge.py:1126: error: Unsupported + # operand type for ~ ("Union[ndarray, generic]") + # [operator] + if not (rk == rk.astype(lk.dtype))[ + ~np.isnan(rk) # type: ignore[operator] + ].all(): warnings.warn( "You are merging on int and float " "columns where the float values " diff --git a/pandas/io/parsers.py b/pandas/io/parsers.py index 5725e2304e1d2..2edf5a987b0b1 100644 --- a/pandas/io/parsers.py +++ b/pandas/io/parsers.py @@ -1737,7 +1737,10 @@ def _infer_types(self, values, na_values, try_num_bool=True): na_count = 0 if issubclass(values.dtype.type, (np.number, np.bool_)): mask = algorithms.isin(values, list(na_values)) - na_count = mask.sum() + # pandas\io\parsers.py:1740: error: Incompatible types in + # assignment (expression has type "number[Any]", variable has type + # "int") [assignment] + na_count = mask.sum() # type: ignore[assignment] if na_count > 0: if is_integer_dtype(values): values = values.astype(np.float64) @@ -1795,7 +1798,11 @@ def _cast_types(self, values, cast_type, column): # TODO: this is for consistency with # c-parser which parses all categories # as 
strings - values = astype_nansafe(values, str) + + # pandas\io\parsers.py:1798: error: Argument 2 to + # "astype_nansafe" has incompatible type "Type[str]"; expected + # "Union[dtype[Any], ExtensionDtype]" [arg-type] + values = astype_nansafe(values, str) # type: ignore[arg-type] cats = Index(values).unique().dropna() values = Categorical._from_inferred_categories( From 5d55636a81f7101fe5de0224398f5e4b9c5e634e Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Mon, 9 Nov 2020 15:14:24 +0000 Subject: [PATCH 34/86] lint fixup --- pandas/_testing.py | 1 - pandas/core/dtypes/cast.py | 15 +++------------ pandas/core/frame.py | 2 +- pandas/core/groupby/generic.py | 3 ++- setup.cfg | 1 - 5 files changed, 6 insertions(+), 16 deletions(-) diff --git a/pandas/_testing.py b/pandas/_testing.py index a046a04403fca..89193ac27438c 100644 --- a/pandas/_testing.py +++ b/pandas/_testing.py @@ -15,7 +15,6 @@ import zipfile import numpy as np - from numpy.random import rand, randn from pandas._config.localization import ( # noqa:F401 diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index fdb67cabfba74..435b458bab41b 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -216,18 +216,9 @@ def maybe_downcast_to_dtype(result, dtype: Union[str, np.dtype]): if hasattr(dtype, "tz"): # not a numpy dtype - # pandas\core\dtypes\cast.py:181: error: Item "ExtensionDtype" of - # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute - # "tz" [union-attr] - - # pandas\core\dtypes\cast.py:181: error: Item "dtype" of - # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute - # "tz" [union-attr] - - # pandas\core\dtypes\cast.py:181: error: Item "type" of - # "Union[ExtensionDtype, dtype, Type[object]]" has no attribute - # "tz" [union-attr] - if dtype.tz: # type: ignore + # pandas\core\dtypes\cast.py:230: error: "dtype[Any]" has no + # attribute "tz" [attr-defined] + if dtype.tz: # type: ignore[attr-defined] # convert to datetime and change timezone from pandas import to_datetime diff --git a/pandas/core/frame.py b/pandas/core/frame.py index 29ce59959108c..bc57b321a21ab 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -4057,7 +4057,7 @@ def lookup(self, row_labels, col_labels) -> np.ndarray: # "Union[ndarray, generic]" for "flatiter[ndarray]"; expected type # "Union[int, integer, Sequence[Union[int, integer]], # Sequence[Sequence[Any]], ndarray, slice, ellipsis]" [index] - result = values.flat[flat_index] # type: ignore [index] + result = values.flat[flat_index] # type: ignore[index] else: result = np.empty(n, dtype="O") for i, (r, c) in enumerate(zip(row_labels, col_labels)): diff --git a/pandas/core/groupby/generic.py b/pandas/core/groupby/generic.py index e5a7ca3519da2..1c30a8aaabb9c 100644 --- a/pandas/core/groupby/generic.py +++ b/pandas/core/groupby/generic.py @@ -322,7 +322,8 @@ def _aggregate_multiple_funcs(self, arg): return results # Argument 1 to "_wrap_aggregated_output" of "SeriesGroupBy" has - # incompatible type "Dict[OutputKey, Union[DataFrame, Series]]"; + # incompatible type "Dict[OutputKey, Union[DataFrame, + # Series]]"; # expected "Mapping[OutputKey, Union[Series, ndarray]]" output = self._wrap_aggregated_output( results, index=None # type: ignore[arg-type] diff --git a/setup.cfg b/setup.cfg index 25e8fae73099b..2b550510ad0eb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -167,4 +167,3 @@ ignore_errors=True [mypy-pandas.tests.window.test_dtypes] ignore_errors=True - From 4445f6db3dbb8cbfd638cdd92505237cca136ff6 Mon Sep 17 00:00:00 2001 
From: Simon Hawkins Date: Wed, 18 Nov 2020 13:20:44 +0000 Subject: [PATCH 35/86] add ignores for newly reported mypy errors --- pandas/_testing.py | 10 ++++++++-- pandas/core/arrays/_mixins.py | 16 ++++++++++++++-- pandas/core/arrays/base.py | 5 ++++- pandas/core/arrays/interval.py | 18 ++++++++++++++++-- pandas/core/dtypes/missing.py | 7 ++++++- pandas/core/indexes/base.py | 15 +++++++++++++-- pandas/core/internals/blocks.py | 14 ++++++++++++-- pandas/core/internals/concat.py | 19 +++++++++++++++---- pandas/core/internals/ops.py | 21 +++++++++++++++++---- pandas/core/nanops.py | 7 ++++++- pandas/core/reshape/pivot.py | 9 ++++++++- 11 files changed, 119 insertions(+), 22 deletions(-) diff --git a/pandas/_testing.py b/pandas/_testing.py index 9f95984c2050b..9739f683f8fd2 100644 --- a/pandas/_testing.py +++ b/pandas/_testing.py @@ -1262,8 +1262,14 @@ def assert_extension_array_equal( left_na, right_na, obj="ExtensionArray NA mask", index_values=index_values ) - left_valid = np.asarray(left[~left_na].astype(object)) - right_valid = np.asarray(right[~right_na].astype(object)) + # pandas\_testing.py:1265: error: Invalid index type "Union[ndarray, + # integer[Any], bool_]" for "ExtensionArray"; expected type "Union[int, + # slice, ndarray]" [index] + left_valid = np.asarray(left[~left_na].astype(object)) # type: ignore[index] + # pandas\_testing.py:1266: error: Invalid index type "Union[ndarray, + # integer[Any], bool_]" for "ExtensionArray"; expected type "Union[int, + # slice, ndarray]" [index] + right_valid = np.asarray(right[~right_na].astype(object)) # type: ignore[index] if check_exact: assert_numpy_array_equal( left_valid, right_valid, obj="ExtensionArray", index_values=index_values diff --git a/pandas/core/arrays/_mixins.py b/pandas/core/arrays/_mixins.py index 741480a4b0991..39d2b5dc36dab 100644 --- a/pandas/core/arrays/_mixins.py +++ b/pandas/core/arrays/_mixins.py @@ -184,7 +184,10 @@ def _concat_same_type( new_values = [x._ndarray for x in to_concat] new_values = np.concatenate(new_values, axis=axis) - return to_concat[0]._from_backing_data(new_values) + # pandas\core\arrays\_mixins.py:187: error: Argument 1 to + # "_from_backing_data" of "NDArrayBackedExtensionArray" has + # incompatible type "List[ndarray]"; expected "ndarray" [arg-type] + return to_concat[0]._from_backing_data(new_values) # type: ignore[arg-type] @doc(ExtensionArray.searchsorted) def searchsorted(self, value, side="left", sorter=None): @@ -225,7 +228,16 @@ def __getitem__( return self._box_func(result) return self._from_backing_data(result) - key = extract_array(key, extract_numpy=True) + # pandas\core\arrays\_mixins.py:228: error: Value of type variable + # "AnyArrayLike" of "extract_array" cannot be "Union[int, slice, + # ndarray]" [type-var] + + # pandas\core\arrays\_mixins.py:228: error: Incompatible types in + # assignment (expression has type "ExtensionArray", variable has type + # "Union[int, slice, ndarray]") [assignment] + key = extract_array( # type: ignore[type-var,assignment] + key, extract_numpy=True + ) key = check_array_indexer(self, key) result = self._ndarray[key] if lib.is_scalar(result): diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py index 3db016bf9af0c..849550bb29e06 100644 --- a/pandas/core/arrays/base.py +++ b/pandas/core/arrays/base.py @@ -1072,7 +1072,10 @@ def view(self, dtype=None) -> ArrayLike: # giving a view with the same dtype as self. 
if dtype is not None: raise NotImplementedError(dtype) - return self[:] + # pandas\core\arrays\base.py:1075: error: Incompatible return value + # type (got "Union[ExtensionArray, Any]", expected "ndarray") + # [return-value] + return self[:] # type: ignore[return-value] # ------------------------------------------------------------------------ # Printing diff --git a/pandas/core/arrays/interval.py b/pandas/core/arrays/interval.py index 28c1c31087fdf..a5a9cc55cfcc1 100644 --- a/pandas/core/arrays/interval.py +++ b/pandas/core/arrays/interval.py @@ -1537,8 +1537,22 @@ def _get_combined_data( # pandas\core\arrays\interval.py:1443: error: Item "Index" of # "Union[Index, ndarray]" has no attribute "reshape" [union-attr] [ - left.reshape(-1, 1), # type: ignore[attr-defined] - right.reshape(-1, 1), # type: ignore[union-attr] + # pandas\core\arrays\interval.py:1540: error: List item 0 has + # incompatible type "Union[DatetimeArray, TimedeltaArray]"; + # expected "DatetimeArray" [list-item] + + # pandas\core\arrays\interval.py:1540: error: List item 0 has + # incompatible type "Union[DatetimeArray, TimedeltaArray]"; + # expected "TimedeltaArray" [list-item] + left.reshape(-1, 1), # type: ignore[attr-defined,list-item] + # pandas\core\arrays\interval.py:1541: error: List item 1 has + # incompatible type "Union[DatetimeArray, TimedeltaArray]"; + # expected "DatetimeArray" [list-item] + + # pandas\core\arrays\interval.py:1541: error: List item 1 has + # incompatible type "Union[DatetimeArray, TimedeltaArray]"; + # expected "TimedeltaArray" [list-item] + right.reshape(-1, 1), # type: ignore[union-attr,list-item] ], axis=1, ) diff --git a/pandas/core/dtypes/missing.py b/pandas/core/dtypes/missing.py index 204a3034e87ce..8fa68e5763b99 100644 --- a/pandas/core/dtypes/missing.py +++ b/pandas/core/dtypes/missing.py @@ -652,7 +652,12 @@ def isna_all(arr: ArrayLike) -> bool: ) for i in range(0, total_len, chunk_len): - if not checker(arr[i : i + chunk_len]).all(): + # pandas\core\dtypes\missing.py:655: error: Argument 1 to "__call__" of + # "ufunc" has incompatible type "Union[ExtensionArray, Any]"; expected + # "Union[Union[int, float, complex, str, bytes, generic], + # Sequence[Union[int, float, complex, str, bytes, generic]], + # Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + if not checker(arr[i : i + chunk_len]).all(): # type: ignore[arg-type] return False return True diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index 7dedb01ba574a..a6e0a66e8174d 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -3253,7 +3253,13 @@ def _filter_indexer_tolerance( tolerance, ) -> np.ndarray: # error: Unsupported left operand type for - ("ExtensionArray") - distance = abs(self._values[indexer] - target) # type: ignore[operator] + + # pandas\core\indexes\base.py:3256: error: Argument 1 to "abs" has + # incompatible type "Union[Any, ndarray, generic]"; expected + # "SupportsAbs[Any]" [arg-type] + distance = abs( + self._values[indexer] - target # type: ignore[operator,arg-type] + ) indexer = np.where(distance <= tolerance, indexer, -1) return indexer @@ -4280,7 +4286,12 @@ def __getitem__(self, key): result = getitem(key) if not is_scalar(result): - if np.ndim(result) > 1: + # pandas\core\indexes\base.py:4283: error: Argument 1 to "ndim" has + # incompatible type "Union[ExtensionArray, Any]"; expected + # "Union[Union[int, float, complex, str, bytes, generic], + # Sequence[Union[int, float, complex, str, bytes, generic]], + # Sequence[Sequence[Any]], 
_SupportsArray]" [arg-type] + if np.ndim(result) > 1: # type: ignore[arg-type] deprecate_ndim_indexing(result) return result return promote(result) diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 1a7d2c4397e8a..0eb3c7646c48f 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -1500,7 +1500,14 @@ def where( and np.isnan(other) ): # convert datetime to datetime64, timedelta to timedelta64 - other = convert_scalar_for_putitemlike(other, values.dtype) + + # pandas\core\internals\blocks.py:1503: error: Argument 2 to + # "convert_scalar_for_putitemlike" has incompatible type + # "Union[dtype[Any], ExtensionDtype]"; expected "dtype[Any]" + # [arg-type] + other = convert_scalar_for_putitemlike( + other, values.dtype # type: ignore[arg-type] + ) # By the time we get here, we should have all Series/Index # args extracted to ndarray @@ -1737,7 +1744,10 @@ def iget(self, col): elif isinstance(col, slice): if col != slice(None): raise NotImplementedError(col) - return self.values[[loc]] + # pandas\core\internals\blocks.py:1740: error: Invalid index + # type "List[Any]" for "ExtensionArray"; expected type + # "Union[int, slice, ndarray]" [index] + return self.values[[loc]] # type: ignore[index] return self.values[loc] else: if col != 0: diff --git a/pandas/core/internals/concat.py b/pandas/core/internals/concat.py index c60a94c496eeb..58b179ab6ed9a 100644 --- a/pandas/core/internals/concat.py +++ b/pandas/core/internals/concat.py @@ -257,9 +257,11 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na): elif getattr(self.block, "is_extension", False): pass elif is_extension_array_dtype(empty_dtype): - missing_arr = empty_dtype.construct_array_type()._from_sequence( - [], dtype=empty_dtype - ) + # pandas\core\internals\concat.py:260: error: Item + # "dtype[Any]" of "Union[dtype[Any], ExtensionDtype]" has + # no attribute "construct_array_type" [union-attr] + tmp = empty_dtype.construct_array_type() # type: ignore[union-attr] + missing_arr = tmp._from_sequence([], dtype=empty_dtype) ncols, nrows = self.shape assert ncols == 1, ncols empty_arr = -1 * np.ones((nrows,), dtype=np.intp) @@ -267,7 +269,16 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na): empty_arr, allow_fill=True, fill_value=fill_value ) else: - missing_arr = np.empty(self.shape, dtype=empty_dtype) + # pandas\core\internals\concat.py:270: error: Argument + # "dtype" to "empty" has incompatible type + # "Union[dtype[Any], ExtensionDtype]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, + # Union[Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" [arg-type] + missing_arr = np.empty( + self.shape, dtype=empty_dtype # type: ignore[arg-type] + ) missing_arr.fill(fill_value) return missing_arr diff --git a/pandas/core/internals/ops.py b/pandas/core/internals/ops.py index 4db1f9e0e5ad4..45975cdf1a3a6 100644 --- a/pandas/core/internals/ops.py +++ b/pandas/core/internals/ops.py @@ -103,19 +103,32 @@ def _get_same_shape_values( # TODO(EA2D): with 2D EAs only this first clause would be needed if not (left_ea or right_ea): - lvals = lvals[rblk.mgr_locs.indexer, :] + # pandas\core\internals\ops.py:106: error: Invalid index type + # "Tuple[Any, slice]" for "Union[ndarray, ExtensionArray]"; expected + # type "Union[int, slice, ndarray]" [index] + lvals = lvals[rblk.mgr_locs.indexer, :] # type: ignore[index] assert lvals.shape == rvals.shape, (lvals.shape, rvals.shape) elif left_ea 
and right_ea: assert lvals.shape == rvals.shape, (lvals.shape, rvals.shape) elif right_ea: # lvals are 2D, rvals are 1D - lvals = lvals[rblk.mgr_locs.indexer, :] + + # pandas\core\internals\ops.py:112: error: Invalid index type + # "Tuple[Any, slice]" for "Union[ndarray, ExtensionArray]"; expected + # type "Union[int, slice, ndarray]" [index] + lvals = lvals[rblk.mgr_locs.indexer, :] # type: ignore[index] assert lvals.shape[0] == 1, lvals.shape - lvals = lvals[0, :] + # pandas\core\internals\ops.py:114: error: Invalid index type + # "Tuple[int, slice]" for "Union[Any, ExtensionArray]"; expected type + # "Union[int, slice, ndarray]" [index] + lvals = lvals[0, :] # type: ignore[index] else: # lvals are 1D, rvals are 2D assert rvals.shape[0] == 1, rvals.shape - rvals = rvals[0, :] + # pandas\core\internals\ops.py:118: error: Invalid index type + # "Tuple[int, slice]" for "Union[ndarray, ExtensionArray]"; expected + # type "Union[int, slice, ndarray]" [index] + rvals = rvals[0, :] # type: ignore[index] # pandas\core\internals\ops.py:120: error: Incompatible return value type # (got "Tuple[Union[ndarray, ExtensionArray], Union[ndarray, diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 5ad548dfa4830..bbf4b830909c1 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -397,7 +397,12 @@ def new_func( if datetimelike: result = _wrap_results(result, orig_values.dtype, fill_value=iNaT) if not skipna: - result = _mask_datetimelike_result(result, axis, mask, orig_values) + # pandas\core\nanops.py:400: error: Argument 3 to + # "_mask_datetimelike_result" has incompatible type + # "Optional[ndarray]"; expected "ndarray" [arg-type] + result = _mask_datetimelike_result( + result, axis, mask, orig_values # type: ignore[arg-type] + ) return result diff --git a/pandas/core/reshape/pivot.py b/pandas/core/reshape/pivot.py index c1198cdfcda81..fafd8e1071209 100644 --- a/pandas/core/reshape/pivot.py +++ b/pandas/core/reshape/pivot.py @@ -452,7 +452,14 @@ def pivot( cols = [] append = index is None - indexed = data.set_index(cols + columns, append=append) + # pandas\core\reshape\pivot.py:455: error: Unsupported operand types + # for + ("List[Any]" and "ExtensionArray") [operator] + + # pandas\core\reshape\pivot.py:455: error: Unsupported left operand + # type for + ("ExtensionArray") [operator] + indexed = data.set_index( + cols + columns, append=append # type: ignore[operator] + ) else: if index is None: index = [Series(data.index, name=data.index.name)] From 5d508ab49966ee2a5ede042a4d50d9087c3ac010 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Wed, 18 Nov 2020 13:25:31 +0000 Subject: [PATCH 36/86] lint fixup --- pandas/core/arrays/interval.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pandas/core/arrays/interval.py b/pandas/core/arrays/interval.py index a5a9cc55cfcc1..9a07083d08244 100644 --- a/pandas/core/arrays/interval.py +++ b/pandas/core/arrays/interval.py @@ -1540,7 +1540,6 @@ def _get_combined_data( # pandas\core\arrays\interval.py:1540: error: List item 0 has # incompatible type "Union[DatetimeArray, TimedeltaArray]"; # expected "DatetimeArray" [list-item] - # pandas\core\arrays\interval.py:1540: error: List item 0 has # incompatible type "Union[DatetimeArray, TimedeltaArray]"; # expected "TimedeltaArray" [list-item] @@ -1548,7 +1547,6 @@ def _get_combined_data( # pandas\core\arrays\interval.py:1541: error: List item 1 has # incompatible type "Union[DatetimeArray, TimedeltaArray]"; # expected "DatetimeArray" [list-item] - # pandas\core\arrays\interval.py:1541: 
error: List item 1 has # incompatible type "Union[DatetimeArray, TimedeltaArray]"; # expected "TimedeltaArray" [list-item] From 3b7565814fa03198091689502714255a34e954d4 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Wed, 18 Nov 2020 16:12:30 +0000 Subject: [PATCH 37/86] replace deprecated ::add-path:: --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 13014df4078b2..0f23ef7dcc8c1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -148,7 +148,7 @@ jobs: sudo apt-get install -y build-essential sudo apt-get clean - name: Setting conda path - run: echo ::add-path::$CONDA/bin + run: echo "$CONDA/bin" >> $GITHUB_PATH - name: Update conda run: | conda config --set quiet true --set always_yes true From 7b1d8df2453b5aacdd6b86632b1212438a4f2bfb Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 10 Dec 2020 19:26:48 +0000 Subject: [PATCH 38/86] numpy-1.20.0rc1 --- .github/workflows/ci.yml | 9 +- pandas/_testing.py | 10 +- pandas/core/algorithms.py | 15 +-- pandas/core/arrays/_ranges.py | 8 +- pandas/core/arrays/categorical.py | 9 +- pandas/core/arrays/datetimes.py | 10 +- pandas/core/arrays/integer.py | 9 +- pandas/core/arrays/masked.py | 5 +- pandas/core/arrays/numpy_.py | 5 +- pandas/core/arrays/period.py | 5 +- pandas/core/dtypes/missing.py | 14 +-- pandas/core/frame.py | 6 +- pandas/core/generic.py | 11 +- pandas/core/groupby/groupby.py | 8 +- pandas/core/indexes/multi.py | 6 +- pandas/core/indexes/numeric.py | 5 +- pandas/core/internals/blocks.py | 16 +-- pandas/core/missing.py | 5 +- pandas/core/nanops.py | 148 ++--------------------- pandas/core/ops/mask_ops.py | 15 +-- pandas/core/ops/missing.py | 5 +- pandas/core/reshape/merge.py | 14 +-- pandas/core/reshape/tile.py | 12 +- pandas/core/sorting.py | 16 +-- pandas/core/window/common.py | 8 +- pandas/core/window/ewm.py | 35 +----- pandas/io/formats/format.py | 59 +++++++-- pandas/io/sas/sas_xport.py | 6 +- pandas/plotting/_matplotlib/converter.py | 7 +- pandas/plotting/_matplotlib/misc.py | 39 +----- 30 files changed, 113 insertions(+), 407 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0f23ef7dcc8c1..1ff4216c5f35d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -160,10 +160,15 @@ jobs: run: | conda env update -n pandas-dev --file=environment.yml conda list - - name: Update numpy using wheel from nightly + # - name: Update numpy using wheel from nightly + # run: | + # source activate pandas-dev + # pip install -U --extra-index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple --pre numpy + # conda list + - name: Update numpy to release candidate run: | source activate pandas-dev - pip install -U --extra-index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple --pre numpy + pip install numpy==1.20.0rc1 conda list - name: Remove pandas run: | diff --git a/pandas/_testing.py b/pandas/_testing.py index 9739f683f8fd2..9f95984c2050b 100644 --- a/pandas/_testing.py +++ b/pandas/_testing.py @@ -1262,14 +1262,8 @@ def assert_extension_array_equal( left_na, right_na, obj="ExtensionArray NA mask", index_values=index_values ) - # pandas\_testing.py:1265: error: Invalid index type "Union[ndarray, - # integer[Any], bool_]" for "ExtensionArray"; expected type "Union[int, - # slice, ndarray]" [index] - left_valid = np.asarray(left[~left_na].astype(object)) # type: ignore[index] - # pandas\_testing.py:1266: error: Invalid index type "Union[ndarray, - # 
integer[Any], bool_]" for "ExtensionArray"; expected type "Union[int, - # slice, ndarray]" [index] - right_valid = np.asarray(right[~right_na].astype(object)) # type: ignore[index] + left_valid = np.asarray(left[~left_na].astype(object)) + right_valid = np.asarray(right[~right_na].astype(object)) if check_exact: assert_numpy_array_equal( left_valid, right_valid, obj="ExtensionArray", index_values=index_values diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index 75dac27213795..3e9c0f56e8a4b 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -1256,24 +1256,13 @@ def compute(self, method: str) -> Series: # fast method arr, pandas_dtype = _ensure_data(dropped.values) if method == "nlargest": - # pandas\core\algorithms.py:1264: error: Incompatible types in - # assignment (expression has type "Union[ndarray, generic]", - # variable has type "ndarray") [assignment] - arr = -arr # type: ignore[assignment] + arr = -arr if is_integer_dtype(pandas_dtype): # GH 21426: ensure reverse ordering at boundaries arr -= 1 elif is_bool_dtype(pandas_dtype): - # GH 26154: ensure False is smaller than True - - # pandas\core\algorithms.py:1269: error: Incompatible types in - # assignment (expression has type "Union[ndarray, generic]", - # variable has type "ndarray") [assignment] - - # pandas\core\algorithms.py:1275: error: Unsupported operand - # types for - ("int" and "generic") [operator] - arr = 1 - (-arr) # type: ignore[assignment,operator] + arr = 1 - (-arr) if self.keep == "last": arr = arr[::-1] diff --git a/pandas/core/arrays/_ranges.py b/pandas/core/arrays/_ranges.py index 25503d83c6152..bf62cafa39467 100644 --- a/pandas/core/arrays/_ranges.py +++ b/pandas/core/arrays/_ranges.py @@ -105,13 +105,7 @@ def _generate_range_overflow_safe( # if periods * strides cannot be multiplied within the *uint64* bounds, # we cannot salvage the operation by recursing, so raise try: - # pandas\core\arrays\_ranges.py:108: error: Argument 1 to - # "unsignedinteger" has incompatible type "Union[ndarray, - # generic]"; expected "Union[SupportsInt, Union[str, bytes], - # SupportsIndex]" [arg-type] - addend = np.uint64(periods) * np.uint64( - np.abs(stride) # type: ignore[arg-type] - ) + addend = np.uint64(periods) * np.uint64(np.abs(stride)) except FloatingPointError as err: raise OutOfBoundsDatetime(msg) from err diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py index 6b502086d4804..46bff03c0a4c9 100644 --- a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -371,13 +371,8 @@ def __init__( if null_mask.any(): # Reinsert -1 placeholders for previously removed missing values full_codes = -np.ones(null_mask.shape, dtype=codes.dtype) - # pandas\core\arrays\categorical.py:370: error: Unsupported target - # for indexed assignment ("Union[ndarray, generic]") [index] - full_codes[~null_mask] = codes # type: ignore[index] - # pandas\core\arrays\categorical.py:371: error: Incompatible types - # in assignment (expression has type "Union[ndarray, generic]", - # variable has type "ndarray") [assignment] - codes = full_codes # type: ignore[assignment] + full_codes[~null_mask] = codes + codes = full_codes self._dtype = self._dtype.update_dtype(dtype) self._codes = coerce_indexer_dtype(codes, dtype.categories) diff --git a/pandas/core/arrays/datetimes.py b/pandas/core/arrays/datetimes.py index f01620038f559..f0e0513830079 100644 --- a/pandas/core/arrays/datetimes.py +++ b/pandas/core/arrays/datetimes.py @@ -1849,15 +1849,7 @@ def 
to_julian_date(self): month[testarr] += 12 return ( day - # pandas\core\arrays\datetimes.py:1845: error: Unsupported operand - # types for / ("generic" and "int") [operator] - # pandas\core\arrays\datetimes.py:1845: error: Unsupported operand - # types for - ("generic" and "int") [operator] - # pandas\core\arrays\datetimes.py:1845: note: Left operand is of - # type "Union[ndarray, generic]" - # pandas\core\arrays\datetimes.py:1845: note: Left operand is of - # type "Union[ndarray, generic, int]" - + np.fix((153 * month - 457) / 5) # type: ignore[operator] + + np.fix((153 * month - 457) / 5) + 365 * year + np.floor(year / 4) - np.floor(year / 100) diff --git a/pandas/core/arrays/integer.py b/pandas/core/arrays/integer.py index dd642d8de45c1..19b67ff52f21c 100644 --- a/pandas/core/arrays/integer.py +++ b/pandas/core/arrays/integer.py @@ -359,18 +359,13 @@ def __init__(self, values: np.ndarray, mask: np.ndarray, copy: bool = False): super().__init__(values, mask, copy=copy) def __neg__(self): - # pandas\core\arrays\integer.py:362: error: Argument 1 to - # "IntegerArray" has incompatible type "Union[ndarray, generic]"; - # expected "ndarray" [arg-type] - return type(self)(-self._data, self._mask) # type: ignore[arg-type] + return type(self)(-self._data, self._mask) def __pos__(self): return self def __abs__(self): - # error: Argument 1 to "IntegerArray" has incompatible type - # "Union[ndarray, generic]"; expected "ndarray" - return type(self)(np.abs(self._data), self._mask) # type: ignore[arg-type] + return type(self)(np.abs(self._data), self._mask) @classmethod def _from_sequence( diff --git a/pandas/core/arrays/masked.py b/pandas/core/arrays/masked.py index 03c70ad032a47..c4bdbb89024fa 100644 --- a/pandas/core/arrays/masked.py +++ b/pandas/core/arrays/masked.py @@ -142,10 +142,7 @@ def __len__(self) -> int: return len(self._data) def __invert__(self: BaseMaskedArrayT) -> BaseMaskedArrayT: - # pandas\core\arrays\masked.py:141: error: Argument 1 to - # "BaseMaskedArray" has incompatible type "Union[ndarray, integer, - # bool_]"; expected "ndarray" [arg-type] - return type(self)(~self._data, self._mask) # type: ignore[arg-type] + return type(self)(~self._data, self._mask) def to_numpy( self, dtype=None, copy: bool = False, na_value: Scalar = lib.no_default diff --git a/pandas/core/arrays/numpy_.py b/pandas/core/arrays/numpy_.py index 4bee2437c686a..d01f2e8218d9b 100644 --- a/pandas/core/arrays/numpy_.py +++ b/pandas/core/arrays/numpy_.py @@ -389,10 +389,7 @@ def to_numpy( # Ops def __invert__(self): - # pandas\core\arrays\numpy_.py:376: error: Argument 1 to "PandasArray" - # has incompatible type "Union[ndarray, integer, bool_]"; expected - # "Union[ndarray, PandasArray]" [arg-type] - return type(self)(~self._ndarray) # type: ignore[arg-type] + return type(self)(~self._ndarray) def _cmp_method(self, other, op): if isinstance(other, PandasArray): diff --git a/pandas/core/arrays/period.py b/pandas/core/arrays/period.py index 960568237625d..6cce7f3b91c28 100644 --- a/pandas/core/arrays/period.py +++ b/pandas/core/arrays/period.py @@ -682,10 +682,7 @@ def _addsub_int_array( """ assert op in [operator.add, operator.sub] if op is operator.sub: - # pandas\core\arrays\period.py:679: error: Incompatible types in - # assignment (expression has type "Union[ndarray, generic]", - # variable has type "ndarray") [assignment] - other = -other # type: ignore[assignment] + other = -other res_values = algos.checked_add_with_arr(self.asi8, other, arr_mask=self._isnan) res_values = res_values.view("i8") 
res_values[self._isnan] = iNaT diff --git a/pandas/core/dtypes/missing.py b/pandas/core/dtypes/missing.py index 8fa68e5763b99..2d2dfa376fe42 100644 --- a/pandas/core/dtypes/missing.py +++ b/pandas/core/dtypes/missing.py @@ -192,9 +192,7 @@ def _use_inf_as_na(key): inf_as_na = get_option(key) globals()["_isna"] = partial(_isna, inf_as_na=inf_as_na) if inf_as_na: - # pandas\core\dtypes\missing.py:195: error: Unsupported operand type - # for ~ ("Union[ndarray, generic]") [operator] - globals()["nan_checker"] = lambda x: ~np.isfinite(x) # type: ignore[operator] + globals()["nan_checker"] = lambda x: ~np.isfinite(x) globals()["INF_AS_NA"] = True else: globals()["nan_checker"] = np.isnan @@ -232,9 +230,7 @@ def _isna_ndarraylike(obj, inf_as_na: bool = False): result = values.view("i8") == iNaT else: if inf_as_na: - # pandas\core\dtypes\missing.py:233: error: Unsupported operand - # type for ~ ("Union[ndarray, generic]") [operator] - result = ~np.isfinite(values) # type: ignore[operator] + result = ~np.isfinite(values) else: result = np.isnan(values) @@ -459,11 +455,7 @@ def array_equivalent( def _array_equivalent_float(left, right): - # pandas\core\dtypes\missing.py:458: error: Unsupported left operand type - # for & ("generic") [operator] - return ( - (left == right) | (np.isnan(left) & np.isnan(right)) # type: ignore[operator] - ).all() + return ((left == right) | (np.isnan(left) & np.isnan(right))).all() def _array_equivalent_datetimelike(left, right): diff --git a/pandas/core/frame.py b/pandas/core/frame.py index 8a933787fe8ff..c4d5ae640412c 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -4057,11 +4057,7 @@ def lookup(self, row_labels, col_labels) -> np.ndarray: if (cidx == -1).any(): raise KeyError("One or more column labels was not found") flat_index = ridx * len(self.columns) + cidx - # pandas\core\frame.py:4011: error: Invalid index type - # "Union[ndarray, generic]" for "flatiter[ndarray]"; expected type - # "Union[int, integer, Sequence[Union[int, integer]], - # Sequence[Sequence[Any]], ndarray, slice, ellipsis]" [index] - result = values.flat[flat_index] # type: ignore[index] + result = values.flat[flat_index] else: result = np.empty(n, dtype="O") for i, (r, c) in enumerate(zip(row_labels, col_labels)): diff --git a/pandas/core/generic.py b/pandas/core/generic.py index a74e552840258..42118b670340f 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -10010,9 +10010,7 @@ def abs(self: FrameOrSeries) -> FrameOrSeries: 2 6 30 -30 3 7 40 -50 """ - # error: Incompatible return value type (got "Union[ndarray, generic]", - # expected "FrameOrSeries") - return np.abs(self) # type: ignore[return-value] + return np.abs(self) @final def describe( @@ -10840,12 +10838,7 @@ def mad(self, axis=None, skipna=None, level=None): demeaned = data - data.mean(axis=0) else: demeaned = data.sub(data.mean(axis=1), axis=0) - # pandas\core\generic.py:10698: error: No overload variant of "mean" of - # "_ArrayOrScalarCommon" matches argument types "Any", "Any" - # [call-overload] - return np.abs(demeaned).mean( - axis=axis, skipna=skipna # type: ignore[call-overload] - ) + return np.abs(demeaned).mean(axis=axis, skipna=skipna) @classmethod def _add_numeric_operations(cls): diff --git a/pandas/core/groupby/groupby.py b/pandas/core/groupby/groupby.py index 0b18587ae2c53..4ee7ca10d9670 100644 --- a/pandas/core/groupby/groupby.py +++ b/pandas/core/groupby/groupby.py @@ -1607,13 +1607,7 @@ def sem(self, ddof: int = 1): ) # TODO(GH-22046) - setting with iloc broken if labels are not 
unique # .values to remove labels - - # pandas\core\groupby\groupby.py:1567: error: Item "ndarray" of - # "Union[ndarray, generic]" has no attribute "values" [union-attr] - - # pandas\core\groupby\groupby.py:1567: error: Item "generic" of - # "Union[ndarray, generic]" has no attribute "values" [union-attr] - tmp = np.sqrt(self.count().iloc[:, cols]).values # type: ignore[union-attr] + tmp = np.sqrt(self.count().iloc[:, cols]).values result.iloc[:, cols] = result.iloc[:, cols].values / tmp return result diff --git a/pandas/core/indexes/multi.py b/pandas/core/indexes/multi.py index 08ec479c46de2..c08dde8d561e9 100644 --- a/pandas/core/indexes/multi.py +++ b/pandas/core/indexes/multi.py @@ -3253,11 +3253,7 @@ def _reorder_indexer( # Set order as given in the indexer list level_indexer = self.levels[i].get_indexer(k) level_indexer = level_indexer[level_indexer >= 0] # Filter absent keys - # pandas\core\indexes\multi.py:3236: error: Unsupported target - # for indexed assignment ("Union[ndarray, generic]") [index] - key_order_map[level_indexer] = np.arange( # type: ignore[index] - len(level_indexer) - ) + key_order_map[level_indexer] = np.arange(len(level_indexer)) new_order = key_order_map[self.codes[i][indexer]] else: diff --git a/pandas/core/indexes/numeric.py b/pandas/core/indexes/numeric.py index 5e8115b019ae9..3b9b102004a7c 100644 --- a/pandas/core/indexes/numeric.py +++ b/pandas/core/indexes/numeric.py @@ -427,10 +427,7 @@ def __contains__(self, other: Any) -> bool: if super().__contains__(other): return True - # error: Incompatible return value type (got "Union[Any, ndarray, - # generic]", expected "bool") - tmp = is_float(other) and np.isnan(other) and self.hasnans - return tmp # type: ignore[return-value] + return is_float(other) and np.isnan(other) and self.hasnans @cache_readonly def is_unique(self) -> bool: diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 0eb3c7646c48f..2aa1d8d270c1d 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -866,10 +866,7 @@ def comp(s: Scalar, mask: np.ndarray, regex: bool = False) -> np.ndarray: an element-wise regular expression matching """ if isna(s): - # pandas\core\internals\blocks.py:844: error: Incompatible - # return value type (got "Union[ndarray, integer, bool_]", - # expected "ndarray") [return-value] - return ~mask # type: ignore[return-value] + return ~mask s = maybe_box_datetimelike(s) # error: Incompatible return value type (got "Union[ndarray, @@ -1530,16 +1527,7 @@ def where( if m.any(): result = cast(np.ndarray, result) # EABlock overrides where - # pandas\core\internals\blocks.py:1478: error: Item "integer" - # of "Union[ndarray, integer, bool_]" has no attribute - # "nonzero" [union-attr] - - # pandas\core\internals\blocks.py:1478: error: Item "bool_" of - # "Union[ndarray, integer, bool_]" has no attribute "nonzero" - # [union-attr] - taken = result.take( - m.nonzero()[0], axis=axis # type: ignore[union-attr] - ) + taken = result.take(m.nonzero()[0], axis=axis) r = maybe_downcast_numeric(taken, self.dtype) nb = self.make_block(r.T, placement=self.mgr_locs[m]) result_blocks.append(nb) diff --git a/pandas/core/missing.py b/pandas/core/missing.py index 920e7d35078ff..c82d6c592e5da 100644 --- a/pandas/core/missing.py +++ b/pandas/core/missing.py @@ -729,10 +729,7 @@ def inner(invalid, limit): return f_idx else: b_idx_inv = list(inner(invalid[::-1], bw_limit)) - # pandas\core\missing.py:721: error: Argument 1 to "set" has - # incompatible type "Union[ndarray, 
generic]"; expected - # "Iterable[Any]" [arg-type] - b_idx = set(N - 1 - np.asarray(b_idx_inv)) # type: ignore[arg-type] + b_idx = set(N - 1 - np.asarray(b_idx_inv)) if fw_limit == 0: return b_idx diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index bbf4b830909c1..024b6617a0905 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -164,9 +164,7 @@ def _has_infs(result) -> bool: elif result.dtype == "f4": return lib.has_infs_f4(result.ravel("K")) try: - # pandas\core\nanops.py:162: error: Incompatible return value type (got - # "bool_", expected "bool") [return-value] - return np.isinf(result).any() # type: ignore[return-value] + return np.isinf(result).any() except (TypeError, NotImplementedError): # if it doesn't support infs, then it can't have infs return False @@ -997,14 +995,7 @@ def nansem( ) var = nanvar(values, axis=axis, skipna=skipna, ddof=ddof) - # pandas\core\nanops.py:878: error: Unsupported left operand type for / - # ("generic") [operator] - - # pandas\core\nanops.py:878: note: Both left and right operands are unions - - # pandas\core\nanops.py:878: error: Incompatible return value type (got - # "Union[ndarray, generic, Any]", expected "float") [return-value] - return np.sqrt(var) / np.sqrt(count) # type: ignore[operator,return-value] + return np.sqrt(var) / np.sqrt(count) def _nanminmax(meth, fill_value_typ): @@ -1192,12 +1183,8 @@ def nanskew( adjusted = values - mean if skipna and mask is not None: np.putmask(adjusted, mask, 0) - # pandas\core\nanops.py:1076: error: Unsupported operand types for ** - # ("generic" and "int") [operator] - adjusted2 = adjusted ** 2 # type: ignore[operator] - # pandas\core\nanops.py:1077: error: Unsupported left operand type for * - # ("generic") [operator] - adjusted3 = adjusted2 * adjusted # type: ignore[operator] + adjusted2 = adjusted ** 2 + adjusted3 = adjusted2 * adjusted m2 = adjusted2.sum(axis, dtype=np.float64) m3 = adjusted3.sum(axis, dtype=np.float64) @@ -1209,32 +1196,7 @@ def nanskew( m3 = _zero_out_fperr(m3) with np.errstate(invalid="ignore", divide="ignore"): - # pandas\core\nanops.py:1075: error: Unsupported operand types for * - # ("int" and "generic") [operator] - - # pandas\core\nanops.py:1075: error: Unsupported operand types for * - # ("float" and "generic") [operator] - - # pandas\core\nanops.py:1075: note: Both left and right operands are - # unions - - # pandas\core\nanops.py:1075: error: Unsupported operand types for / - # ("float" and "generic") [operator] - - # pandas\core\nanops.py:1075: error: Unsupported operand types for / - # ("generic" and "float") [operator] - - # pandas\core\nanops.py:1075: error: Unsupported left operand type for - # / ("generic") [operator] - - # pandas\core\nanops.py:1075: error: Unsupported operand types for ** - # ("generic" and "float") [operator] - - # pandas\core\nanops.py:1075: note: Left operand is of type - # "Union[float, ndarray, generic]" - result = ( - count * (count - 1) ** 0.5 / (count - 2) # type: ignore[operator] - ) * (m3 / m2 ** 1.5) + result = (count * (count - 1) ** 0.5 / (count - 2)) * (m3 / m2 ** 1.5) dtype = values.dtype if is_float_dtype(dtype): @@ -1308,84 +1270,15 @@ def nankurt( adjusted = values - mean if skipna and mask is not None: np.putmask(adjusted, mask, 0) - # pandas\core\nanops.py:1187: error: Unsupported operand types for ** - # ("generic" and "int") [operator] - adjusted2 = adjusted ** 2 # type: ignore[operator] - # pandas\core\nanops.py:1188: error: Unsupported operand types for ** - # ("generic" and "int") [operator] - 
adjusted4 = adjusted2 ** 2 # type: ignore[operator] + adjusted2 = adjusted ** 2 + adjusted4 = adjusted2 ** 2 m2 = adjusted2.sum(axis, dtype=np.float64) m4 = adjusted4.sum(axis, dtype=np.float64) with np.errstate(invalid="ignore", divide="ignore"): - # pandas\core\nanops.py:1154: error: Unsupported operand types for * - # ("int" and "generic") [operator] - - # pandas\core\nanops.py:1154: note: Right operand is of type - # "Union[float, ndarray, generic, Any]" - - # pandas\core\nanops.py:1154: error: Unsupported operand types for / - # ("float" and "generic") [operator] - - # pandas\core\nanops.py:1154: error: Unsupported operand types for / - # ("generic" and "float") [operator] - - # pandas\core\nanops.py:1154: error: Unsupported left operand type for - # / ("generic") [operator] - - # pandas\core\nanops.py:1154: note: Both left and right operands are - # unions - - # pandas\core\nanops.py:1154: error: Unsupported operand types for ** - # ("generic" and "int") [operator] - - # pandas\core\nanops.py:1154: note: Left operand is of type - # "Union[float, ndarray, generic]" - - # pandas\core\nanops.py:1154: error: Unsupported operand types for * - # ("float" and "generic") [operator] - - # pandas\core\nanops.py:1154: error: Unsupported operand types for * - # ("generic" and "float") [operator] - - # pandas\core\nanops.py:1154: error: Unsupported left operand type for - # * ("generic") [operator] - adj = ( - 3 * (count - 1) ** 2 / ((count - 2) * (count - 3)) # type: ignore[operator] - ) - # pandas\core\nanops.py:1155: error: Unsupported operand types for * - # ("int" and "generic") [operator] - - # pandas\core\nanops.py:1155: error: Unsupported operand types for * - # ("float" and "generic") [operator] - - # pandas\core\nanops.py:1155: note: Both left and right operands are - # unions - - # pandas\core\nanops.py:1155: error: Unsupported operand types for * - # ("generic" and "float") [operator] - - # pandas\core\nanops.py:1155: error: Unsupported left operand type for - # * ("generic") [operator] - - # pandas\core\nanops.py:1242: error: Argument 1 to "__call__" of - # "_NumberOp" has incompatible type "generic"; expected "Union[int, - # float, complex, number, bool_]" [arg-type] - numer = ( - count * (count + 1) * (count - 1) * m4 # type: ignore[operator,arg-type] - ) - # pandas\core\nanops.py:1156: error: Unsupported operand types for * - # ("float" and "generic") [operator] - - # pandas\core\nanops.py:1156: error: Unsupported operand types for * - # ("generic" and "float") [operator] - - # pandas\core\nanops.py:1156: error: Unsupported left operand type for - # * ("generic") [operator] - - # pandas\core\nanops.py:1156: note: Both left and right operands are - # unions - denom = (count - 2) * (count - 3) * m2 ** 2 # type: ignore[operator] + adj = 3 * (count - 1) ** 2 / ((count - 2) * (count - 3)) + numer = count * (count + 1) * (count - 1) * m4 + denom = (count - 2) * (count - 3) * m2 ** 2 # floating point error # @@ -1526,10 +1419,7 @@ def _get_counts( if mask is not None: count = mask.shape[axis] - mask.sum(axis) else: - # pandas\core\nanops.py:1389: error: Incompatible types in assignment - # (expression has type "int", variable has type "Union[ndarray, - # generic]") [assignment] - count = values_shape[axis] # type: ignore[assignment] + count = values_shape[axis] if is_scalar(count): # error: Incompatible return value type (got "Union[Any, generic]", @@ -1568,11 +1458,7 @@ def _maybe_null_out( The product of all elements on a given axis. 
( NaNs are treated as 1) """ if mask is not None and axis is not None and getattr(result, "ndim", False): - # pandas\core\nanops.py:1419: error: Unsupported operand types for - - # ("generic" and "int") [operator] - null_mask = ( - mask.shape[axis] - mask.sum(axis) - min_count # type: ignore[operator] - ) < 0 + null_mask = (mask.shape[axis] - mask.sum(axis) - min_count) < 0 if np.any(null_mask): if is_numeric_dtype(result): if np.iscomplexobj(result): @@ -1917,15 +1803,7 @@ def na_accum_func(values: ArrayLike, accum_func, *, skipna: bool) -> ArrayLike: result[mask] = iNaT elif accum_func == np.minimum.accumulate: # Restore NaTs that we masked previously - - # pandas\core\nanops.py:1756: error: Item "integer" of - # "Union[ndarray, integer, bool_]" has no attribute "nonzero" - # [union-attr] - - # pandas\core\nanops.py:1756: error: Item "bool_" of - # "Union[ndarray, integer, bool_]" has no attribute "nonzero" - # [union-attr] - nz = (~np.asarray(mask)).nonzero()[0] # type: ignore[union-attr] + nz = (~np.asarray(mask)).nonzero()[0] if len(nz): # everything up to the first non-na entry stays NaT result[: nz[0]] = iNaT diff --git a/pandas/core/ops/mask_ops.py b/pandas/core/ops/mask_ops.py index ff266130ca7b5..092c7a1260cdc 100644 --- a/pandas/core/ops/mask_ops.py +++ b/pandas/core/ops/mask_ops.py @@ -46,10 +46,7 @@ def kleene_or( if right is libmissing.NA: result = left.copy() else: - # pandas\core\ops\mask_ops.py:49: error: Incompatible types in - # assignment (expression has type "Union[ndarray, integer, bool_]", - # variable has type "ndarray") [assignment] - result = left | right # type: ignore[assignment] + result = left | right if right_mask is not None: # output is unknown where (False & NA), (NA & False), (NA & NA) @@ -116,10 +113,7 @@ def kleene_xor( else: mask = left_mask.copy() else: - # pandas\core\ops\mask_ops.py:116: error: Incompatible types in - # assignment (expression has type "Union[ndarray, integer, bool_]", - # variable has type "ndarray") [assignment] - mask = left_mask | right_mask # type: ignore[assignment] + mask = left_mask | right_mask return result, mask @@ -160,10 +154,7 @@ def kleene_and( if right is libmissing.NA: result = np.zeros_like(left) else: - # pandas\core\ops\mask_ops.py:157: error: Incompatible types in - # assignment (expression has type "Union[ndarray, integer, bool_]", - # variable has type "ndarray") [assignment] - result = left & right # type: ignore[assignment] + result = left & right if right_mask is None: # Scalar `right` diff --git a/pandas/core/ops/missing.py b/pandas/core/ops/missing.py index b1907bf106164..c33cb32dcec19 100644 --- a/pandas/core/ops/missing.py +++ b/pandas/core/ops/missing.py @@ -58,10 +58,7 @@ def fill_zeros(result, x, y): # GH#7325, mask and nans must be broadcastable (also: GH#9308) # Raveling and then reshaping makes np.putmask faster - - # pandas\core\ops\missing.py:61: error: Unsupported operand type - # for ~ ("Union[ndarray, generic]") [operator] - mask = ((y == 0) & ~np.isnan(result)).ravel() # type: ignore[operator] + mask = ((y == 0) & ~np.isnan(result)).ravel() shape = result.shape result = result.astype("float64", copy=False).ravel() diff --git a/pandas/core/reshape/merge.py b/pandas/core/reshape/merge.py index 1a74d18e5586a..615d272379d44 100644 --- a/pandas/core/reshape/merge.py +++ b/pandas/core/reshape/merge.py @@ -1114,12 +1114,7 @@ def _maybe_coerce_merge_keys(self): # check whether ints and floats elif is_integer_dtype(rk.dtype) and is_float_dtype(lk.dtype): - # pandas\core\reshape\merge.py:1116: error: 
Unsupported - # operand type for ~ ("Union[ndarray, generic]") - # [operator] - if not (lk == lk.astype(rk.dtype))[ - ~np.isnan(lk) # type: ignore[operator] - ].all(): + if not (lk == lk.astype(rk.dtype))[~np.isnan(lk)].all(): warnings.warn( "You are merging on int and float " "columns where the float values " @@ -1129,12 +1124,7 @@ def _maybe_coerce_merge_keys(self): continue elif is_float_dtype(rk.dtype) and is_integer_dtype(lk.dtype): - # pandas\core\reshape\merge.py:1126: error: Unsupported - # operand type for ~ ("Union[ndarray, generic]") - # [operator] - if not (rk == rk.astype(lk.dtype))[ - ~np.isnan(rk) # type: ignore[operator] - ].all(): + if not (rk == rk.astype(lk.dtype))[~np.isnan(rk)].all(): warnings.warn( "You are merging on int and float " "columns where the float values " diff --git a/pandas/core/reshape/tile.py b/pandas/core/reshape/tile.py index 2ca9ca431aa75..4c5347bd16e8b 100644 --- a/pandas/core/reshape/tile.py +++ b/pandas/core/reshape/tile.py @@ -604,17 +604,9 @@ def _round_frac(x, precision: int): if not np.isfinite(x) or x == 0: return x else: - # error: 'numpy.generic' object is not iterable - frac, whole = np.modf(x) # type: ignore[misc] + frac, whole = np.modf(x) if whole == 0: - # pandas\core\reshape\tile.py:610: error: Argument 1 to "int" has - # incompatible type "Union[ndarray, generic]"; expected "Union[str, - # bytes, SupportsInt, _SupportsIndex]" [arg-type] - digits = ( - -int(np.floor(np.log10(abs(frac)))) # type: ignore[arg-type] - - 1 - + precision - ) + digits = -int(np.floor(np.log10(abs(frac)))) - 1 + precision else: digits = precision return np.around(x, digits) diff --git a/pandas/core/sorting.py b/pandas/core/sorting.py index fc4dd032d960a..30f4854def24f 100644 --- a/pandas/core/sorting.py +++ b/pandas/core/sorting.py @@ -313,12 +313,7 @@ def lexsort_indexer( codes += 1 else: # not order means descending if na_position == "last": - # pandas\core\sorting.py:313: error: Unsupported operand types - # for - ("generic" and "int") [operator] - - # pandas\core\sorting.py:313: note: Left operand is of type - # "Union[ndarray, generic]" - codes = np.where(mask, n, n - codes - 1) # type: ignore[operator] + codes = np.where(mask, n, n - codes - 1) elif na_position == "first": codes = np.where(mask, 0, n - codes) if mask.any(): @@ -591,14 +586,7 @@ def get_group_index_sorter(group_index, ngroups: int): count = len(group_index) alpha = 0.0 # taking complexities literally; there may be beta = 1.0 # some room for fine-tuning these parameters - # pandas\core\sorting.py:566: error: Unsupported operand types for * ("int" - # and "generic") [operator] - - # pandas\core\sorting.py:566: note: Right operand is of type - # "Union[ndarray, generic]" - do_groupsort = count > 0 and ( - (alpha + beta * ngroups) < (count * np.log(count)) # type: ignore[operator] - ) + do_groupsort = count > 0 and ((alpha + beta * ngroups) < (count * np.log(count))) if do_groupsort: sorter, _ = algos.groupsort_indexer(ensure_int64(group_index), ngroups) return ensure_platform_int(sorter) diff --git a/pandas/core/window/common.py b/pandas/core/window/common.py index 82cfca01fced2..938f1846230cb 100644 --- a/pandas/core/window/common.py +++ b/pandas/core/window/common.py @@ -165,14 +165,10 @@ def zsqrt(x): if isinstance(x, ABCDataFrame): if mask._values.any(): - # pandas\core\window\common.py:168: error: Unsupported target for - # indexed assignment ("Union[ndarray, generic]") [index] - result[mask] = 0 # type: ignore[index] + result[mask] = 0 else: if mask.any(): - # 
pandas\core\window\common.py:171: error: Unsupported target for - # indexed assignment ("Union[ndarray, generic]") [index] - result[mask] = 0 # type: ignore[index] + result[mask] = 0 return result diff --git a/pandas/core/window/ewm.py b/pandas/core/window/ewm.py index aa02e21dd06f7..687fe1fbba16a 100644 --- a/pandas/core/window/ewm.py +++ b/pandas/core/window/ewm.py @@ -64,34 +64,8 @@ def get_center_of_mass( elif halflife is not None: if halflife <= 0: raise ValueError("halflife must satisfy: halflife > 0") - # pandas\core\window\ewm.py:51: error: Unsupported operand types for - - # ("int" and "generic") [operator] - - # pandas\core\window\ewm.py:51: note: Right operand is of type - # "Union[ndarray, generic]" - - # pandas\core\window\ewm.py:51: error: Unsupported operand types for / - # ("generic" and "float") [operator] - - # pandas\core\window\ewm.py:51: note: Left operand is of type - # "Union[ndarray, generic]" - decay = 1 - np.exp(np.log(0.5) / halflife) # type: ignore[operator] - # pandas\core\window\ewm.py:52: error: Unsupported operand types for / - # ("int" and "generic") [operator] - - # pandas\core\window\ewm.py:52: note: Right operand is of type - # "Union[ndarray, generic, int]" - - # pandas\core\window\ewm.py:52: error: Unsupported operand types for - - # ("generic" and "int") [operator] - - # pandas\core\window\ewm.py:52: error: Incompatible types in assignment - # (expression has type "Union[ndarray, generic, float]", variable has - # type "Optional[float]") [assignment] - - # pandas\core\window\ewm.py:52: note: Left operand is of type - # "Union[ndarray, generic, float]" - comass = 1 / decay - 1 # type: ignore[operator, assignment] + decay = 1 - np.exp(np.log(0.5) / halflife) + comass = 1 / decay - 1 elif alpha is not None: if alpha <= 0 or alpha > 1: raise ValueError("alpha must satisfy: 0 < alpha <= 1") @@ -99,10 +73,7 @@ def get_center_of_mass( else: raise ValueError("Must pass one of comass, span, halflife, or alpha") - # pandas\core\window\ewm.py:60: error: Argument 1 to "float" has - # incompatible type "Optional[float]"; expected "Union[SupportsFloat, - # _SupportsIndex, str, bytes, bytearray]" [arg-type] - return float(comass) # type: ignore[arg-type] + return float(comass) def wrap_result(obj: "Series", result: np.ndarray) -> "Series": diff --git a/pandas/io/formats/format.py b/pandas/io/formats/format.py index a41cbd329196e..47890896602d5 100644 --- a/pandas/io/formats/format.py +++ b/pandas/io/formats/format.py @@ -1607,16 +1607,36 @@ def format_percentiles( ): raise ValueError("percentiles should all be in the interval [0,1]") - # pandas\io\formats\format.py:1668: error: Incompatible types in assignment - # (expression has type "Union[ndarray, generic]", variable has type - # "Union[ndarray, List[Union[int, float]], List[float], List[Union[str, - # float]]]") [assignment] - percentiles = 100 * percentiles # type: ignore[assignment] + percentiles = 100 * percentiles - int_idx = np.isclose(percentiles.astype(int), percentiles) + # pandas/io/formats/format.py:1612: error: Item "List[Union[int, float]]" of + # "Union[ndarray, List[Union[int, float]], List[float], List[Union[str, float]]]" + # has no attribute "astype" [union-attr] + + # pandas/io/formats/format.py:1612: error: Item "List[float]" of "Union[ndarray, + # List[Union[int, float]], List[float], List[Union[str, float]]]" has no attribute + # "astype" [union-attr] + + # pandas/io/formats/format.py:1612: error: Item "List[Union[str, float]]" of + # "Union[ndarray, List[Union[int, float]], List[float], 
List[Union[str, float]]]" + # has no attribute "astype" [union-attr] + int_idx = np.isclose( + percentiles.astype(int), percentiles # type: ignore[union-attr] + ) if np.all(int_idx): - out = percentiles.astype(int).astype(str) + # pandas/io/formats/format.py:1615: error: Item "List[Union[int, float]]" of + # "Union[ndarray, List[Union[int, float]], List[float], List[Union[str, + # float]]]" has no attribute "astype" [union-attr] + + # pandas/io/formats/format.py:1615: error: Item "List[float]" of "Union[ndarray, + # List[Union[int, float]], List[float], List[Union[str, float]]]" has no + # attribute "astype" [union-attr] + + # pandas/io/formats/format.py:1615: error: Item "List[Union[str, float]]" of + # "Union[ndarray, List[Union[int, float]], List[float], List[Union[str, + # float]]]" has no attribute "astype" [union-attr] + out = percentiles.astype(int).astype(str) # type: ignore[union-attr] return [i + "%" for i in out] unique_pcts = np.unique(percentiles) @@ -1624,16 +1644,29 @@ def format_percentiles( to_end = 100 - unique_pcts[-1] if unique_pcts[-1] < 100 else None # Least precision that keeps percentiles unique after rounding - - # pandas\io\formats\format.py:1603: error: Unsupported operand type for - # unary - ("Union[ndarray, generic]") [operator] - prec = -np.floor( # type: ignore[operator] + prec = -np.floor( np.log10(np.min(np.ediff1d(unique_pcts, to_begin=to_begin, to_end=to_end))) ).astype(int) prec = max(1, prec) out = np.empty_like(percentiles, dtype=object) - out[int_idx] = percentiles[int_idx].astype(int).astype(str) - out[~int_idx] = percentiles[~int_idx].round(prec).astype(str) + # pandas/io/formats/format.py:1635: error: No overload variant of "__getitem__" of + # "list" matches argument type "Union[bool_, ndarray]" [call-overload] + out[int_idx] = ( + percentiles[int_idx].astype(int).astype(str) # type: ignore[call-overload] + ) + + # pandas/io/formats/format.py:1636: error: Item "float" of "Union[Any, + # float, str]" has no attribute "round" [union-attr] + + # pandas/io/formats/format.py:1636: error: Item "str" of "Union[Any, float, + # str]" has no attribute "round" [union-attr] + + # pandas/io/formats/format.py:1636: error: Invalid index type "Union[bool_, + # Any]" for "Union[ndarray, List[Union[int, float]], List[float], + # List[Union[str, float]]]"; expected type "int" [index] + out[~int_idx] = ( + percentiles[~int_idx].round(prec).astype(str) # type: ignore[union-attr,index] + ) return [i + "%" for i in out] diff --git a/pandas/io/sas/sas_xport.py b/pandas/io/sas/sas_xport.py index 699a7d5783092..2ecfbed8cc83f 100644 --- a/pandas/io/sas/sas_xport.py +++ b/pandas/io/sas/sas_xport.py @@ -216,11 +216,7 @@ def _parse_float_vec(vec): # order 3 bits of the first half since we're only shifting by # 1, 2, or 3. 
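The format_percentiles changes above hinge on one rule: print whole percents without decimals, otherwise use the fewest decimals that keep neighbouring percentiles distinct. A stripped-down sketch of that rule follows; the function name percentile_labels is made up here, and the 0/100 padding stands in for the to_begin/to_end arguments used in the diff:

    import numpy as np

    def percentile_labels(percentiles):
        # percentiles are fractions in [0, 1], e.g. [0.25, 0.5, 0.75]
        pcts = 100 * np.asarray(percentiles, dtype=float)
        int_idx = np.isclose(pcts.astype(int), pcts)
        if int_idx.all():
            # every value is a whole percent: no decimals needed
            return [f"{int(p)}%" for p in pcts]
        # the smallest gap between distinct values (padded with 0 and 100)
        # determines the least precision that keeps the labels unique
        edges = np.unique(np.concatenate(([0.0], pcts, [100.0])))
        prec = max(1, int(-np.floor(np.log10(np.ediff1d(edges).min()))))
        return [
            f"{int(p)}%" if whole else f"{np.round(p, prec)}%"
            for p, whole in zip(pcts, int_idx)
        ]

percentile_labels([0.25, 0.5, 0.75]) gives ['25%', '50%', '75%'], while percentile_labels([0.01, 0.011]) gives ['1%', '1.1%'].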
ieee1 >>= shift - # pandas\io\sas\sas_xport.py:218: error: Unsupported operand types for + - # ("int" and "generic") [operator] - ieee2 = (xport2 >> shift) | ( - (xport1 & 0x00000007) << (29 + (3 - shift)) # type: ignore[operator] - ) + ieee2 = (xport2 >> shift) | ((xport1 & 0x00000007) << (29 + (3 - shift))) # clear the 1 bit to the left of the binary point ieee1 &= 0xFFEFFFFF diff --git a/pandas/plotting/_matplotlib/converter.py b/pandas/plotting/_matplotlib/converter.py index 80c4463cc0966..38789fffed8a0 100644 --- a/pandas/plotting/_matplotlib/converter.py +++ b/pandas/plotting/_matplotlib/converter.py @@ -1072,12 +1072,7 @@ def format_timedelta_ticks(x, pos, n_decimals: int) -> str: def __call__(self, x, pos=0) -> str: (vmin, vmax) = tuple(self.axis.get_view_interval()) - # pandas\plotting\_matplotlib\converter.py:1075: error: Argument 1 to - # "int" has incompatible type "Union[ndarray, generic]"; expected - # "Union[str, bytes, SupportsInt, _SupportsIndex]" [arg-type] - n_decimals = int( - np.ceil(np.log10(100 * 1e9 / abs(vmax - vmin))) # type: ignore[arg-type] - ) + n_decimals = int(np.ceil(np.log10(100 * 1e9 / abs(vmax - vmin)))) if n_decimals > 9: n_decimals = 9 return self.format_timedelta_ticks(x, pos, n_decimals) diff --git a/pandas/plotting/_matplotlib/misc.py b/pandas/plotting/_matplotlib/misc.py index d8c9181cef9e1..a1c62f9fce23c 100644 --- a/pandas/plotting/_matplotlib/misc.py +++ b/pandas/plotting/_matplotlib/misc.py @@ -442,12 +442,7 @@ def autocorrelation_plot( if ax is None: ax = plt.gca(xlim=(1, n), ylim=(-1.0, 1.0)) mean = np.mean(data) - # pandas\plotting\_matplotlib\misc.py:445: error: Unsupported operand types - # for ** ("generic" and "int") [operator] - - # pandas\plotting\_matplotlib\misc.py:445: note: Left operand is of type - # "Union[ndarray, generic]" - c0 = np.sum((data - mean) ** 2) / float(n) # type: ignore[operator] + c0 = np.sum((data - mean) ** 2) / float(n) def r(h): return ((data[: n - h] - mean) * (data[h:] - mean)).sum() / float(n) / c0 @@ -456,35 +451,11 @@ def r(h): y = [r(loc) for loc in x] z95 = 1.959963984540054 z99 = 2.5758293035489004 - # pandas\plotting\_matplotlib\misc.py:454: error: Unsupported operand types - # for / ("float" and "generic") [operator] - - # pandas\plotting\_matplotlib\misc.py:454: note: Right operand is of type - # "Union[ndarray, generic]" - ax.axhline( - y=z99 / np.sqrt(n), linestyle="--", color="grey" # type: ignore[operator] - ) - # pandas\plotting\_matplotlib\misc.py:455: error: Unsupported operand types - # for / ("float" and "generic") [operator] - - # pandas\plotting\_matplotlib\misc.py:455: note: Right operand is of type - # "Union[ndarray, generic]" - ax.axhline(y=z95 / np.sqrt(n), color="grey") # type: ignore[operator] + ax.axhline(y=z99 / np.sqrt(n), linestyle="--", color="grey") + ax.axhline(y=z95 / np.sqrt(n), color="grey") ax.axhline(y=0.0, color="black") - # pandas\plotting\_matplotlib\misc.py:457: error: Unsupported operand types - # for / ("float" and "generic") [operator] - - # pandas\plotting\_matplotlib\misc.py:457: note: Right operand is of type - # "Union[ndarray, generic]" - ax.axhline(y=-z95 / np.sqrt(n), color="grey") # type: ignore[operator] - # pandas\plotting\_matplotlib\misc.py:458: error: Unsupported operand types - # for / ("float" and "generic") [operator] - - # pandas\plotting\_matplotlib\misc.py:458: note: Right operand is of type - # "Union[ndarray, generic]" - ax.axhline( - y=-z99 / np.sqrt(n), linestyle="--", color="grey" # type: ignore[operator] - ) + ax.axhline(y=-z95 / 
np.sqrt(n), color="grey") + ax.axhline(y=-z99 / np.sqrt(n), linestyle="--", color="grey") ax.set_xlabel("Lag") ax.set_ylabel("Autocorrelation") ax.plot(x, y, **kwds) From 0319d75da353a5c796ce0260f6551fe7614bd184 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Fri, 11 Dec 2020 14:34:17 +0000 Subject: [PATCH 39/86] add ignores --- pandas/core/algorithms.py | 100 ++++++++++++++++++++++---- pandas/core/arraylike.py | 8 ++- pandas/core/arrays/base.py | 8 ++- pandas/core/arrays/categorical.py | 25 +++++-- pandas/core/arrays/datetimelike.py | 17 +++-- pandas/core/arrays/floating.py | 4 +- pandas/core/arrays/interval.py | 6 +- pandas/core/arrays/sparse/array.py | 5 +- pandas/core/arrays/string_.py | 5 +- pandas/core/arrays/string_arrow.py | 19 +++-- pandas/core/base.py | 4 +- pandas/core/construction.py | 12 +--- pandas/core/dtypes/cast.py | 27 +++++-- pandas/core/frame.py | 5 +- pandas/core/generic.py | 4 +- pandas/core/groupby/generic.py | 16 +++-- pandas/core/groupby/ops.py | 24 +++++-- pandas/core/indexes/base.py | 39 ++++++++-- pandas/core/indexes/category.py | 5 +- pandas/core/indexes/numeric.py | 4 +- pandas/core/internals/blocks.py | 18 +++-- pandas/core/internals/construction.py | 11 +-- pandas/core/missing.py | 13 +++- pandas/core/nanops.py | 12 +--- pandas/core/ops/array_ops.py | 5 +- pandas/core/reshape/concat.py | 5 +- pandas/core/reshape/merge.py | 33 +++++---- pandas/io/formats/format.py | 6 +- pandas/io/pytables.py | 20 ++---- pandas/plotting/_matplotlib/core.py | 12 +++- setup.cfg | 9 +-- 31 files changed, 341 insertions(+), 140 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index 08875cf34fc72..23baeb91fe83e 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -131,7 +131,10 @@ def _ensure_data( with catch_warnings(): simplefilter("ignore", np.ComplexWarning) values = ensure_float64(values) - return values, np.dtype("float64") + # pandas/core/algorithms.py:134: error: Incompatible return value type (got + # "Tuple[ExtensionArray, dtype[floating[_64Bit]]]", expected "Tuple[ndarray, + # Union[dtype[Any], ExtensionDtype]]") [return-value] + return values, np.dtype("float64") # type: ignore[return-value] except (TypeError, ValueError, OverflowError): # if we are trying to coerce to a dtype @@ -148,7 +151,10 @@ def _ensure_data( elif is_timedelta64_dtype(values.dtype) or is_timedelta64_dtype(dtype): from pandas import TimedeltaIndex - values = TimedeltaIndex(values)._data + # pandas/core/algorithms.py:151: error: Incompatible types in assignment + # (expression has type "TimedeltaArray", variable has type "ndarray") + # [assignment] + values = TimedeltaIndex(values)._data # type: ignore[assignment] dtype = values.dtype else: # Datetime @@ -165,29 +171,49 @@ def _ensure_data( from pandas import DatetimeIndex - values = DatetimeIndex(values)._data + # pandas/core/algorithms.py:168: error: Incompatible types in assignment + # (expression has type "DatetimeArray", variable has type "ndarray") + # [assignment] + values = DatetimeIndex(values)._data # type: ignore[assignment] dtype = values.dtype # error: Incompatible return value type (got "Tuple[Any, Union[dtype, # ExtensionDtype, None]]", expected "Tuple[ndarray, Union[dtype, # ExtensionDtype]]") - return values.asi8, dtype # type: ignore[return-value] + + # pandas/core/algorithms.py:174: error: Item "ndarray" of "Union[Any, ndarray]" + # has no attribute "asi8" [union-attr] + return values.asi8, dtype # type: ignore[return-value,union-attr] elif is_categorical_dtype(values.dtype) 
and ( is_categorical_dtype(dtype) or dtype is None ): - values = cast("Categorical", values) - values = values.codes + # pandas/core/algorithms.py:179: error: Incompatible types in assignment + # (expression has type "Categorical", variable has type "ndarray") [assignment] + values = cast("Categorical", values) # type: ignore[assignment] + # pandas/core/algorithms.py:180: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "ExtensionArray") + # [assignment] + + # pandas/core/algorithms.py:180: error: Item "ndarray" of "Union[Any, ndarray]" + # has no attribute "codes" [union-attr] + values = values.codes # type: ignore[assignment,union-attr] dtype = pandas_dtype("category") # we are actually coercing to int64 # until our algos support int* directly (not all do) values = ensure_int64(values) - return values, dtype + # pandas/core/algorithms.py:187: error: Incompatible return value type (got + # "Tuple[ExtensionArray, Union[dtype[Any], ExtensionDtype]]", expected + # "Tuple[ndarray, Union[dtype[Any], ExtensionDtype]]") [return-value] + return values, dtype # type: ignore[return-value] # we have failed, return object - values = np.asarray(values, dtype=object) + + # pandas/core/algorithms.py:190: error: Incompatible types in assignment (expression + # has type "ndarray", variable has type "ExtensionArray") [assignment] + values = np.asarray(values, dtype=object) # type: ignore[assignment] return ensure_object(values), np.dtype("object") @@ -212,7 +238,9 @@ def _reconstruct_data( return values if is_extension_array_dtype(dtype): - cls = dtype.construct_array_type() + # pandas/core/algorithms.py:215: error: Item "dtype[Any]" of "Union[dtype[Any], + # ExtensionDtype]" has no attribute "construct_array_type" [union-attr] + cls = dtype.construct_array_type() # type: ignore[union-attr] if isinstance(values, cls) and values.dtype == dtype: return values @@ -295,7 +323,9 @@ def _get_values_for_rank(values: ArrayLike): if is_categorical_dtype(values): values = cast("Categorical", values)._values_for_rank() - values, _ = _ensure_data(values) + # pandas/core/algorithms.py:298: error: Incompatible types in assignment (expression + # has type "ndarray", variable has type "ExtensionArray") [assignment] + values, _ = _ensure_data(values) # type: ignore[assignment] return values @@ -463,12 +493,34 @@ def isin(comps: AnyArrayLike, values: AnyArrayLike) -> np.ndarray: values = _ensure_arraylike(list(values)) elif isinstance(values, ABCMultiIndex): # Avoid raising in extract_array - values = np.array(values) + + # pandas/core/algorithms.py:466: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "ExtensionArray") + # [assignment] + + # pandas/core/algorithms.py:466: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "Index") [assignment] + + # pandas/core/algorithms.py:466: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "Series") [assignment] + values = np.array(values) # type: ignore[assignment] else: - values = extract_array(values, extract_numpy=True) + # pandas/core/algorithms.py:468: error: Incompatible types in assignment + # (expression has type "Union[Any, ExtensionArray]", variable has type "Index") + # [assignment] + + # pandas/core/algorithms.py:468: error: Incompatible types in assignment + # (expression has type "Union[Any, ExtensionArray]", variable has type "Series") + # [assignment] + values = extract_array(values, 
extract_numpy=True) # type: ignore[assignment] comps = _ensure_arraylike(comps) - comps = extract_array(comps, extract_numpy=True) + # pandas/core/algorithms.py:471: error: Incompatible types in assignment (expression + # has type "Union[Any, ExtensionArray]", variable has type "Index") [assignment] + + # pandas/core/algorithms.py:471: error: Incompatible types in assignment (expression + # has type "Union[Any, ExtensionArray]", variable has type "Series") [assignment] + comps = extract_array(comps, extract_numpy=True) # type: ignore[assignment] if is_categorical_dtype(comps.dtype): # TODO(extension) # handle categoricals @@ -502,7 +554,23 @@ def isin(comps: AnyArrayLike, values: AnyArrayLike) -> np.ndarray: f = np.in1d else: - common = np.find_common_type([values.dtype, comps.dtype], []) + # pandas/core/algorithms.py:505: error: List item 0 has incompatible type + # "Union[Any, dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], None, + # type, _SupportsDType, str, Tuple[Any, Union[int, Sequence[int]]], List[Any], + # _DTypeDict, Tuple[Any, Any]]" [list-item] + + # pandas/core/algorithms.py:505: error: List item 1 has incompatible type + # "Union[Any, ExtensionDtype]"; expected "Union[dtype[Any], None, type, + # _SupportsDType, str, Tuple[Any, Union[int, Sequence[int]]], List[Any], + # _DTypeDict, Tuple[Any, Any]]" [list-item] + + # pandas/core/algorithms.py:505: error: List item 1 has incompatible type + # "Union[dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], None, type, + # _SupportsDType, str, Tuple[Any, Union[int, Sequence[int]]], List[Any], + # _DTypeDict, Tuple[Any, Any]]" [list-item] + common = np.find_common_type( + [values.dtype, comps.dtype], [] # type: ignore[list-item] + ) values = values.astype(common, copy=False) comps = comps.astype(common, copy=False) name = common.name @@ -914,7 +982,9 @@ def duplicated(values: ArrayLike, keep: str = "first") -> np.ndarray: ------- duplicated : ndarray """ - values, _ = _ensure_data(values) + # pandas/core/algorithms.py:917: error: Incompatible types in assignment (expression + # has type "ndarray", variable has type "ExtensionArray") [assignment] + values, _ = _ensure_data(values) # type: ignore[assignment] ndtype = values.dtype.name f = getattr(htable, f"duplicated_{ndtype}") return f(values, keep=keep) diff --git a/pandas/core/arraylike.py b/pandas/core/arraylike.py index 6b28f8f135769..0907e7d7b7a56 100644 --- a/pandas/core/arraylike.py +++ b/pandas/core/arraylike.py @@ -168,7 +168,13 @@ def array_ufunc(self, ufunc: Callable, method: str, *inputs: Any, **kwargs: Any) return result # Determine if we should defer. 
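The fallback branch above boils down to: promote both arrays to a shared dtype, then let numpy do the membership test. A minimal sketch of that path, using np.result_type in place of the older np.find_common_type call shown in the diff and skipping the hash-table fast path and the object/datetime special cases:

    import numpy as np

    def simple_isin(comps, values) -> np.ndarray:
        comps = np.asarray(comps)
        values = np.asarray(values)
        # cast both sides to a common dtype so e.g. int64 vs float64
        # comparisons behave consistently, then defer to np.in1d
        common = np.result_type(values.dtype, comps.dtype)
        return np.in1d(comps.astype(common, copy=False),
                       values.astype(common, copy=False))

simple_isin([1, 2, 3], [2.0, 4.0]) returns array([False, True, False]).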
- no_defer = (np.ndarray.__array_ufunc__, cls.__array_ufunc__) + + # pandas/core/arraylike.py:171: error: "Type[ndarray]" has no attribute + # "__array_ufunc__" [attr-defined] + no_defer = ( + np.ndarray.__array_ufunc__, # type: ignore[attr-defined] + cls.__array_ufunc__, + ) for item in inputs: higher_priority = ( diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py index b6bf7b62b0ad9..394c4bfd6dc2c 100644 --- a/pandas/core/arrays/base.py +++ b/pandas/core/arrays/base.py @@ -366,13 +366,17 @@ def __contains__(self, item) -> bool: if not self._can_hold_na: return False elif item is self.dtype.na_value or isinstance(item, self.dtype.type): - return self.isna().any() + # pandas/core/arrays/base.py:369: error: "ExtensionArray" has no + # attribute "any" [attr-defined] + return self.isna().any() # type: ignore[attr-defined] else: return False else: return (item == self).any() - def __eq__(self, other: Any) -> ArrayLike: + # pandas/core/arrays/base.py:375: error: Signature of "__eq__" incompatible with + # supertype "object" [override] + def __eq__(self, other: Any) -> ArrayLike: # type: ignore[override] """ Return for `self == other` (element-wise equality). """ diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py index 2e59b896e1b8c..255ad589ef7a9 100644 --- a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -359,9 +359,9 @@ def __init__( dtype = CategoricalDtype(categories, dtype.ordered) elif is_categorical_dtype(values.dtype): - # pandas\core\arrays\categorical.py:359: error: "ExtensionArray" - # has no attribute "codes" [attr-defined] - old_codes = extract_array(values).codes # type: ignore[attr-defined] + # pandas/core/arrays/categorical.py:362: error: Item "ExtensionArray" of + # "Union[Any, ExtensionArray]" has no attribute "codes" [union-attr] + old_codes = extract_array(values).codes # type: ignore[union-attr] codes = recode_for_categories( old_codes, values.dtype.categories, dtype.categories ) @@ -424,7 +424,20 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: raise ValueError("Cannot convert float NaN to integer") elif len(self.codes) == 0 or len(self.categories) == 0: - result = np.array(self, dtype=dtype, copy=copy) + # pandas/core/arrays/categorical.py:425: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type + # "Categorical") [assignment] + result = np.array( # type: ignore[assignment] + self, + # pandas/core/arrays/categorical.py:425: error: Argument "dtype" to + # "array" has incompatible type "Union[ExtensionDtype, str, dtype[Any], + # Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]"; expected "Union[dtype[Any], None, type, + # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + dtype=dtype, # type: ignore[arg-type] + copy=copy, + ) else: # GH8628 (PERF): astype category codes instead of astyping array @@ -440,7 +453,9 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: astyped_cats = extract_array(astyped_cats, extract_numpy=True) result = take_1d(astyped_cats, libalgos.ensure_platform_int(self._codes)) - return result + # pandas/core/arrays/categorical.py:441: error: Incompatible return value type + # (got "Categorical", expected "ndarray") [return-value] + return result # type: ignore[return-value] @cache_readonly def itemsize(self) -> int: diff --git a/pandas/core/arrays/datetimelike.py 
b/pandas/core/arrays/datetimelike.py index 22d34cc691aab..823a3601a2a4d 100644 --- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -763,7 +763,10 @@ def isin(self, values) -> np.ndarray: # ------------------------------------------------------------------ # Null Handling - def isna(self) -> np.ndarray: + # pandas/core/arrays/datetimelike.py:766: error: Return type "ndarray" of "isna" + # incompatible with return type "ArrayLike" in supertype "ExtensionArray" + # [override] + def isna(self) -> np.ndarray: # type: ignore[override] return self._isnan @property # NB: override with cache_readonly in immutable subclasses @@ -778,7 +781,9 @@ def _hasnans(self) -> np.ndarray: """ return if I have any nans; enables various perf speedups """ - return bool(self._isnan.any()) + # pandas/core/arrays/datetimelike.py:781: error: Incompatible return value type + # (got "bool", expected "ndarray") [return-value] + return bool(self._isnan.any()) # type: ignore[return-value] def _maybe_mask_results( self, result: np.ndarray, fill_value=iNaT, convert=None @@ -1119,9 +1124,11 @@ def _addsub_object_array(self, other: np.ndarray, op): res_values = op(self.astype("O"), np.asarray(other)) result = array(res_values.ravel()) - # error: "ExtensionArray" has no attribute "reshape"; maybe "shape"? - tmp = extract_array(result, extract_numpy=True) - result = tmp.reshape(self.shape) # type: ignore[attr-defined] + # pandas/core/arrays/datetimelike.py:1122: error: Item "ExtensionArray" of + # "Union[Any, ExtensionArray]" has no attribute "reshape" [union-attr] + result = extract_array(result, extract_numpy=True).reshape( # type: ignore[union-attr] # noqa + self.shape + ) return result def _time_shift(self, periods, freq=None): diff --git a/pandas/core/arrays/floating.py b/pandas/core/arrays/floating.py index f4a5552bb3503..3d364fe956e5f 100644 --- a/pandas/core/arrays/floating.py +++ b/pandas/core/arrays/floating.py @@ -394,7 +394,9 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: # In astype, we consider dtype=float to also mean na_value=np.nan kwargs = {"na_value": np.nan} elif is_datetime64_dtype(dtype): - kwargs = {"na_value": np.datetime64("NaT")} + # pandas/core/arrays/floating.py:397: error: Dict entry 0 has incompatible + # type "str": "datetime64"; expected "str": "float" [dict-item] + kwargs = {"na_value": np.datetime64("NaT")} # type: ignore[dict-item] else: kwargs = {} diff --git a/pandas/core/arrays/interval.py b/pandas/core/arrays/interval.py index be377b110cf4b..222da377dacf4 100644 --- a/pandas/core/arrays/interval.py +++ b/pandas/core/arrays/interval.py @@ -564,7 +564,11 @@ def __getitem__(self, key): if is_scalar(left) and isna(left): return self._fill_value return Interval(left, right, self.closed) - if np.ndim(left) > 1: + # pandas/core/arrays/interval.py:567: error: Argument 1 to "ndim" has + # incompatible type "Union[ndarray, ExtensionArray]"; expected "Union[Union[int, + # float, complex, str, bytes, generic], Sequence[Union[int, float, complex, str, + # bytes, generic]], Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + if np.ndim(left) > 1: # type: ignore[arg-type] # GH#30588 multi-dimensional indexer disallowed raise ValueError("multi-dimensional indexing not allowed") return self._shallow_copy(left, right) diff --git a/pandas/core/arrays/sparse/array.py b/pandas/core/arrays/sparse/array.py index 7656beb169a90..0cbdb82ae863b 100644 --- a/pandas/core/arrays/sparse/array.py +++ b/pandas/core/arrays/sparse/array.py @@ -719,7 +719,10 @@ def 
factorize(self, na_sentinel=-1): # Given that we have to return a dense array of codes, why bother # implementing an efficient factorize? codes, uniques = algos.factorize(np.asarray(self), na_sentinel=na_sentinel) - uniques = SparseArray(uniques, dtype=self.dtype) + # pandas/core/arrays/sparse/array.py:722: error: Incompatible types in + # assignment (expression has type "SparseArray", variable has type + # "Union[ndarray, Index]") [assignment] + uniques = SparseArray(uniques, dtype=self.dtype) # type: ignore[assignment] return codes, uniques def value_counts(self, dropna=True): diff --git a/pandas/core/arrays/string_.py b/pandas/core/arrays/string_.py index cc2013deb5252..10be2b50567a0 100644 --- a/pandas/core/arrays/string_.py +++ b/pandas/core/arrays/string_.py @@ -296,7 +296,10 @@ def astype(self, dtype, copy=True): values = arr.astype(dtype.numpy_dtype) return IntegerArray(values, mask, copy=False) elif isinstance(dtype, FloatingDtype): - arr = self.copy() + # pandas/core/arrays/string_.py:299: error: Incompatible types in assignment + # (expression has type "StringArray", variable has type "ndarray") + # [assignment] + arr = self.copy() # type: ignore[assignment] mask = self.isna() arr[mask] = "0" values = arr.astype(dtype.numpy_dtype) diff --git a/pandas/core/arrays/string_arrow.py b/pandas/core/arrays/string_arrow.py index 184fbc050036b..413ed55d2429f 100644 --- a/pandas/core/arrays/string_arrow.py +++ b/pandas/core/arrays/string_arrow.py @@ -305,7 +305,10 @@ def __getitem__(self, item: Any) -> Any: if not len(item): return type(self)(pa.chunked_array([], type=pa.string())) elif is_integer_dtype(item.dtype): - return self.take(item) + # pandas/core/arrays/string_arrow.py:308: error: Argument 1 to "take" of + # "ArrowStringArray" has incompatible type "ndarray"; expected + # "Sequence[int]" [arg-type] + return self.take(item) # type: ignore[arg-type] elif is_bool_dtype(item.dtype): return type(self)(self._data.filter(item)) else: @@ -394,7 +397,10 @@ def nbytes(self) -> int: """ return self._data.nbytes - def isna(self) -> np.ndarray: + # pandas/core/arrays/string_arrow.py:397: error: Return type "ndarray" of "isna" + # incompatible with return type "ArrayLike" in supertype "ExtensionArray" + # [override] + def isna(self) -> np.ndarray: # type: ignore[override] """ Boolean NumPy array indicating if each value is missing. 
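The SparseArray.factorize hunk above keeps a simple round-trip: densify, run the ordinary factorize, then re-wrap the uniques so they carry the original sparse dtype. The same round-trip can be written with public API; the values here are arbitrary:

    import numpy as np
    import pandas as pd
    from pandas.arrays import SparseArray

    sparse = SparseArray([1.0, 0.0, 1.0, 2.0, 0.0], fill_value=0.0)
    # densify and factorize with the regular algorithm ...
    codes, uniques = pd.factorize(np.asarray(sparse))
    # ... then re-wrap the uniques in the original sparse dtype,
    # mirroring the assignment flagged in the hunk above
    uniques = SparseArray(uniques, dtype=sparse.dtype)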
@@ -469,7 +475,9 @@ def __setitem__(self, key: Union[int, np.ndarray], value: Any) -> None: # Slice data and insert inbetween new_data = [ - *self._data[0:key].chunks, + # pandas/core/arrays/string_arrow.py:472: error: Slice index must be an + # integer or None [misc] + *self._data[0:key].chunks, # type: ignore[misc] pa.array([value], type=pa.string()), *self._data[(key + 1) :].chunks, ] @@ -560,7 +568,10 @@ def take( if not is_array_like(indices): indices_array = np.asanyarray(indices) else: - indices_array = indices + # pandas/core/arrays/string_arrow.py:563: error: Incompatible types in + # assignment (expression has type "Sequence[int]", variable has type + # "ndarray") [assignment] + indices_array = indices # type: ignore[assignment] if len(self._data) == 0 and (indices_array >= 0).any(): raise IndexError("cannot do a non-empty take") diff --git a/pandas/core/base.py b/pandas/core/base.py index 53ca293426594..55f9be322a8ad 100644 --- a/pandas/core/base.py +++ b/pandas/core/base.py @@ -1313,4 +1313,6 @@ def drop_duplicates(self, keep="first"): return result def duplicated(self, keep="first"): - return duplicated(self._values, keep=keep) + # pandas/core/base.py:1316: error: Value of type variable "ArrayLike" of + # "duplicated" cannot be "Union[ExtensionArray, ndarray]" [type-var] + return duplicated(self._values, keep=keep) # type: ignore[type-var] diff --git a/pandas/core/construction.py b/pandas/core/construction.py index 405a9db8712a6..9f75c7c5ef866 100644 --- a/pandas/core/construction.py +++ b/pandas/core/construction.py @@ -498,11 +498,7 @@ def sanitize_array( else: subarr = maybe_convert_platform(data) - # pandas\core\construction.py:485: error: Argument 2 to - # "maybe_cast_to_datetime" has incompatible type "Union[dtype, - # ExtensionDtype, None]"; expected "Union[dtype, ExtensionDtype]" - # [arg-type] - subarr = maybe_cast_to_datetime(subarr, dtype) # type: ignore[arg-type] + subarr = maybe_cast_to_datetime(subarr, dtype) elif isinstance(data, range): # GH#16804 @@ -619,11 +615,7 @@ def _try_cast(arr, dtype: Optional[DtypeObj], copy: bool, raise_cast_failure: bo maybe_cast_to_integer_array(arr, dtype) # type: ignore[arg-type] subarr = arr else: - # pandas\core\construction.py:598: error: Argument 2 to - # "maybe_cast_to_datetime" has incompatible type "Union[dtype, - # ExtensionDtype, None]"; expected "Union[dtype, ExtensionDtype]" - # [arg-type] - subarr = maybe_cast_to_datetime(arr, dtype) # type: ignore[arg-type] + subarr = maybe_cast_to_datetime(arr, dtype) # Take care in creating object arrays (but iterators are not # supported): diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index b59a976f89464..4a79cb07abb8e 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -334,11 +334,22 @@ def maybe_cast_result( ): # We have to special case categorical so as not to upcast # things like counts back to categorical - cls = dtype.construct_array_type() - result = maybe_cast_to_extension_array(cls, result, dtype=dtype) + + # pandas/core/dtypes/cast.py:337: error: Item "dtype[Any]" of "Union[dtype[Any], + # ExtensionDtype]" has no attribute "construct_array_type" [union-attr] + cls = dtype.construct_array_type() # type: ignore[union-attr] + # pandas/core/dtypes/cast.py:338: error: Argument "dtype" to + # "maybe_cast_to_extension_array" has incompatible type "Union[dtype[Any], + # ExtensionDtype]"; expected "Optional[ExtensionDtype]" [arg-type] + result = maybe_cast_to_extension_array( + cls, result, dtype=dtype # type: ignore[arg-type] + ) 
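Setting a single element of an ArrowStringArray, as in the __setitem__ hunk earlier in this patch, works by slicing the backing ChunkedArray around the position and concatenating a one-element array in between. A small standalone sketch (requires pyarrow; the values are arbitrary):

    import pyarrow as pa

    data = pa.chunked_array([["a", "b", "c"]])
    key = 1
    # splice a replacement value in at position `key` by re-using the
    # chunks to the left and right of it, as the hunk above does
    new_data = [
        *data[0:key].chunks,
        pa.array(["B"], type=pa.string()),
        *data[(key + 1):].chunks,
    ]
    data = pa.chunked_array(new_data)   # -> ["a", "B", "c"]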
elif numeric_only and is_numeric_dtype(dtype) or not numeric_only: - result = maybe_downcast_to_dtype(result, dtype) + # pandas/core/dtypes/cast.py:341: error: Argument 2 to "maybe_downcast_to_dtype" + # has incompatible type "Union[dtype[Any], ExtensionDtype]"; expected + # "Union[str, dtype[Any]]" [arg-type] + result = maybe_downcast_to_dtype(result, dtype) # type: ignore[arg-type] return result @@ -1447,8 +1458,14 @@ def maybe_cast_to_datetime(value, dtype: Optional[DtypeObj]): # pandas supports dtype whose granularity is less than [ns] # e.g., [ps], [fs], [as] - if dtype <= np.dtype("M8[ns]"): - if dtype.name == "datetime64": + + # pandas/core/dtypes/cast.py:1450: error: Unsupported operand types + # for >= ("dtype[Any]" and "ExtensionDtype") [operator] + if dtype <= np.dtype("M8[ns]"): # type: ignore[operator] + # pandas/core/dtypes/cast.py:1451: error: Item "None" of + # "Union[dtype[Any], ExtensionDtype, None]" has no attribute + # "name" [union-attr] + if dtype.name == "datetime64": # type: ignore[union-attr] raise ValueError(msg) dtype = DT64NS_DTYPE else: diff --git a/pandas/core/frame.py b/pandas/core/frame.py index f3db9814dcc19..d5b54f7923863 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -704,7 +704,10 @@ def __init__( raise ValueError("DataFrame constructor not properly called!") shape = (len(index), len(columns)) - values = np.full(shape, arr) + # pandas/core/frame.py:707: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type + # "List[ExtensionArray]") [assignment] + values = np.full(shape, arr) # type: ignore[assignment] mgr = init_ndarray( # error: "List[ExtensionArray]" has no attribute "dtype" diff --git a/pandas/core/generic.py b/pandas/core/generic.py index 24daaff3eb6ed..c084960fccc09 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -8499,7 +8499,9 @@ def last(self: FrameOrSeries, offset) -> FrameOrSeries: start_date = self.index[-1] - offset start = self.index.searchsorted(start_date, side="right") - return self.iloc[start:] + # pandas/core/generic.py:8502: error: Slice index must be an integer or None + # [misc] + return self.iloc[start:] # type: ignore[misc] @final def rank( diff --git a/pandas/core/groupby/generic.py b/pandas/core/groupby/generic.py index 4ee013ee9fe32..ed483aab52d5f 100644 --- a/pandas/core/groupby/generic.py +++ b/pandas/core/groupby/generic.py @@ -715,11 +715,16 @@ def value_counts( lab = cut(Series(val), bins, include_lowest=True) # error: "ndarray" has no attribute "cat" lev = lab.cat.categories # type: ignore[attr-defined] - # error: "ndarray" has no attribute "cat" - lab = lev.take( + # pandas/core/groupby/generic.py:719: error: No overload variant of "take" + # of "_ArrayOrScalarCommon" matches argument types "Any", "bool", + # "Union[Any, float]" [call-overload] + lab = lev.take( # type: ignore[call-overload] + # error: "ndarray" has no attribute "cat" lab.cat.codes, # type: ignore[attr-defined] allow_fill=True, - fill_value=lev._na_value, + # pandas/core/groupby/generic.py:722: error: Item "ndarray" of + # "Union[ndarray, Index]" has no attribute "_na_value" [union-attr] + fill_value=lev._na_value, # type: ignore[union-attr] ) llab = lambda lab, inc: lab[inc]._multiindex.codes[-1] @@ -1125,7 +1130,10 @@ def blk_func(bvalues: ArrayLike) -> ArrayLike: assert how == "ohlc" raise - result = py_fallback(bvalues) + # pandas/core/groupby/generic.py:1128: error: Incompatible types in + # assignment (expression has type "ExtensionArray", variable has type + # "ndarray") 
[assignment] + result = py_fallback(bvalues) # type: ignore[assignment] return cast_agg_result(result, bvalues, how) diff --git a/pandas/core/groupby/ops.py b/pandas/core/groupby/ops.py index 7724e3930f7df..d458cdc88e016 100644 --- a/pandas/core/groupby/ops.py +++ b/pandas/core/groupby/ops.py @@ -510,7 +510,11 @@ def _ea_wrap_cython_operation( ) if how in ["rank"]: # preserve float64 dtype - return res_values + + # pandas/core/groupby/ops.py:513: error: Incompatible return value type + # (got "ndarray", expected "Tuple[ndarray, Optional[List[str]]]") + # [return-value] + return res_values # type: ignore[return-value] res_values = res_values.astype("i8", copy=False) result = type(orig_values)._simple_new(res_values, dtype=orig_values.dtype) @@ -524,9 +528,14 @@ def _ea_wrap_cython_operation( ) dtype = maybe_cast_result_dtype(orig_values.dtype, how) if is_extension_array_dtype(dtype): - cls = dtype.construct_array_type() + # pandas/core/groupby/ops.py:527: error: Item "dtype[Any]" of + # "Union[dtype[Any], ExtensionDtype]" has no attribute + # "construct_array_type" [union-attr] + cls = dtype.construct_array_type() # type: ignore[union-attr] return cls._from_sequence(res_values, dtype=dtype) - return res_values + # pandas/core/groupby/ops.py:529: error: Incompatible return value type (got + # "ndarray", expected "Tuple[ndarray, Optional[List[str]]]") [return-value] + return res_values # type: ignore[return-value] elif is_float_dtype(values.dtype): # FloatingArray @@ -561,7 +570,9 @@ def _cython_operation( self._disallow_invalid_ops(values, how) if is_extension_array_dtype(values.dtype): - return self._ea_wrap_cython_operation( + # pandas/core/groupby/ops.py:564: error: Incompatible return value type (got + # "Tuple[ndarray, Optional[List[str]]]", expected "ndarray") [return-value] + return self._ea_wrap_cython_operation( # type: ignore[return-value] kind, values, how, axis, min_count, **kwargs ) @@ -649,7 +660,10 @@ def _cython_operation( # e.g. if we are int64 and need to restore to datetime64/timedelta64 # "rank" is the only member of cython_cast_blocklist we get here dtype = maybe_cast_result_dtype(orig_values.dtype, how) - result = maybe_downcast_to_dtype(result, dtype) + # pandas/core/groupby/ops.py:652: error: Argument 2 to + # "maybe_downcast_to_dtype" has incompatible type "Union[dtype[Any], + # ExtensionDtype]"; expected "Union[str, dtype[Any]]" [arg-type] + result = maybe_downcast_to_dtype(result, dtype) # type: ignore[arg-type] return result diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index c39055391344e..b830fdf442d29 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -336,7 +336,13 @@ def __new__( # they are actually ints, e.g. '0' and 0.0 # should not be coerced # GH 11836 - data = _maybe_cast_with_dtype(data, dtype, copy) + + # pandas/core/indexes/base.py:339: error: Argument 1 to + # "_maybe_cast_with_dtype" has incompatible type "Union[ndarray, Index, + # Series]"; expected "ndarray" [arg-type] + data = _maybe_cast_with_dtype( + data, dtype, copy # type: ignore[arg-type] + ) dtype = data.dtype # TODO: maybe not for object? 
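The extension-array branches above share one reconstruction idiom: ask the extension dtype for its array class via construct_array_type(), then rebuild from the raw values with _from_sequence. A minimal sketch of that idiom; _from_sequence is an internal constructor, and the nullable Int64 dtype is just a convenient example:

    import numpy as np
    import pandas as pd

    dtype = pd.Int64Dtype()
    res_values = np.array([1, 2, 3], dtype="int64")  # stand-in for a raw cython result
    # recover the concrete ExtensionArray class from the dtype, then
    # rebuild an extension array around the plain numpy values
    cls = dtype.construct_array_type()
    result = cls._from_sequence(res_values, dtype=dtype)  # a nullable Int64 array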
# maybe coerce to a sub-class @@ -3842,7 +3848,11 @@ def _join_non_unique(self, other, how="left", return_indexers=False): mask = left_idx == -1 np.putmask(join_index, mask, rvalues.take(right_idx)) - join_index = self._wrap_joined_index(join_index, other) + # pandas/core/indexes/base.py:3845: error: Incompatible types in assignment + # (expression has type "Index", variable has type "ndarray") [assignment] + join_index = self._wrap_joined_index( + join_index, other # type: ignore[assignment] + ) if return_indexers: return join_index, left_idx, right_idx @@ -4020,10 +4030,20 @@ def _join_monotonic(self, other, how="left", return_indexers=False): ridx = None elif how == "inner": join_index, lidx, ridx = self._inner_indexer(sv, ov) - join_index = self._wrap_joined_index(join_index, other) + # pandas/core/indexes/base.py:4023: error: Argument 1 to + # "_wrap_joined_index" of "Index" has incompatible type "Index"; + # expected "ndarray" [arg-type] + join_index = self._wrap_joined_index( + join_index, other # type: ignore[arg-type] + ) elif how == "outer": join_index, lidx, ridx = self._outer_indexer(sv, ov) - join_index = self._wrap_joined_index(join_index, other) + # pandas/core/indexes/base.py:4026: error: Argument 1 to + # "_wrap_joined_index" of "Index" has incompatible type "Index"; + # expected "ndarray" [arg-type] + join_index = self._wrap_joined_index( + join_index, other # type: ignore[arg-type] + ) else: if how == "left": join_index, lidx, ridx = self._left_indexer(sv, ov) @@ -4033,7 +4053,12 @@ def _join_monotonic(self, other, how="left", return_indexers=False): join_index, lidx, ridx = self._inner_indexer(sv, ov) elif how == "outer": join_index, lidx, ridx = self._outer_indexer(sv, ov) - join_index = self._wrap_joined_index(join_index, other) + # pandas/core/indexes/base.py:4036: error: Argument 1 to + # "_wrap_joined_index" of "Index" has incompatible type "Index"; expected + # "ndarray" [arg-type] + join_index = self._wrap_joined_index( + join_index, other # type: ignore[arg-type] + ) if return_indexers: lidx = None if lidx is None else ensure_platform_int(lidx) @@ -5250,7 +5275,9 @@ def isin(self, values, level=None): """ if level is not None: self._validate_index_level(level) - return algos.isin(self._values, values) + # pandas/core/indexes/base.py:5253: error: Value of type variable "AnyArrayLike" + # of "isin" cannot be "Union[ExtensionArray, ndarray]" [type-var] + return algos.isin(self._values, values) # type: ignore[type-var] def _get_string_slice(self, key: str_t): # this is for partial string indexing, diff --git a/pandas/core/indexes/category.py b/pandas/core/indexes/category.py index def31bdd3feb7..a444ee3344957 100644 --- a/pandas/core/indexes/category.py +++ b/pandas/core/indexes/category.py @@ -510,7 +510,10 @@ def _get_indexer( if self.is_unique and self.equals(target): return np.arange(len(self), dtype="intp") - return self._get_indexer_non_unique(target._values)[0] + # pandas/core/indexes/category.py:513: error: Value of type variable "ArrayLike" + # of "_get_indexer_non_unique" of "CategoricalIndex" cannot be + # "Union[ExtensionArray, ndarray]" [type-var] + return self._get_indexer_non_unique(target._values)[0] # type: ignore[type-var] @Appender(_index_shared_docs["get_indexer_non_unique"] % _index_doc_kwargs) def get_indexer_non_unique(self, target): diff --git a/pandas/core/indexes/numeric.py b/pandas/core/indexes/numeric.py index ed76e26a57634..a15daa32994a1 100644 --- a/pandas/core/indexes/numeric.py +++ b/pandas/core/indexes/numeric.py @@ -284,7 +284,9 @@ 
def asi8(self) -> np.ndarray: FutureWarning, stacklevel=2, ) - return self._values.view(self._default_dtype) + # pandas/core/indexes/numeric.py:287: error: Incompatible return value type (got + # "Union[ExtensionArray, ndarray]", expected "ndarray") [return-value] + return self._values.view(self._default_dtype) # type: ignore[return-value] class Int64Index(IntegerIndex): diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 4ab516e6ee6f8..57e401f279e7c 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -788,7 +788,9 @@ def replace( # so does not get here. to_replace = convert_scalar_for_putitemlike(to_replace, values.dtype) - mask = missing.mask_missing(values, to_replace) + # pandas/core/internals/blocks.py:791: error: Value of type variable "ArrayLike" + # of "mask_missing" cannot be "Union[ndarray, ExtensionArray]" [type-var] + mask = missing.mask_missing(values, to_replace) # type: ignore[type-var] if not mask.any(): # Note: we get here with test_replace_extension_other incorrectly # bc _can_hold_element is incorrect. @@ -903,7 +905,14 @@ def comp(s: Scalar, mask: np.ndarray, regex: bool = False) -> np.ndarray: masks = [comp(s[0], mask, regex) for s in pairs] else: # GH#38086 faster if we know we dont need to check for regex - masks = [missing.mask_missing(self.values, s[0]) for s in pairs] + + # pandas/core/internals/blocks.py:906: error: Value of type variable + # "ArrayLike" of "mask_missing" cannot be "Union[ndarray, ExtensionArray]" + # [type-var] + masks = [ + missing.mask_missing(self.values, s[0]) # type: ignore[type-var] + for s in pairs + ] masks = [_extract_bool_array(x) for x in masks] @@ -1372,11 +1381,8 @@ def func(yvalues: np.ndarray) -> np.ndarray: # process a 1-d slice, returning it # should the axis argument be handled below in apply_along_axis? # i.e. 
not an arg to missing.interpolate_1d - - # error: Argument "xvalues" to "interpolate_1d" has incompatible - # type "Index"; expected "ndarray" return missing.interpolate_1d( - xvalues=index, # type: ignore[arg-type] + xvalues=index, yvalues=yvalues, method=method, limit=limit, diff --git a/pandas/core/internals/construction.py b/pandas/core/internals/construction.py index caa28e95b7802..0a7acfe2b7a87 100644 --- a/pandas/core/internals/construction.py +++ b/pandas/core/internals/construction.py @@ -612,10 +612,7 @@ def _list_of_series_to_arrays( values = extract_array(s, extract_numpy=True) aligned_values.append(algorithms.take_1d(values, indexer)) - # pandas\core\internals\construction.py:613: error: Incompatible types in - # assignment (expression has type "ndarray", variable has type - # "ExtensionArray") [assignment] - values = np.vstack(aligned_values) # type: ignore[assignment] + values = np.vstack(aligned_values) if values.dtype == np.object_: content = list(values.T) @@ -751,11 +748,7 @@ def _convert_object_array( def convert(arr): if dtype != np.dtype("O"): arr = lib.maybe_convert_objects(arr, try_float=coerce_float) - # pandas\core\internals\construction.py:742: error: Argument 2 to - # "maybe_cast_to_datetime" has incompatible type "Union[dtype, - # ExtensionDtype, None]"; expected "Union[dtype, ExtensionDtype]" - # [arg-type] - arr = maybe_cast_to_datetime(arr, dtype) # type: ignore[arg-type] + arr = maybe_cast_to_datetime(arr, dtype) return arr arrays = [convert(arr) for arr in content] diff --git a/pandas/core/missing.py b/pandas/core/missing.py index 6bedd12c68a6a..4d1b648b2c09e 100644 --- a/pandas/core/missing.py +++ b/pandas/core/missing.py @@ -41,7 +41,11 @@ def mask_missing(arr: ArrayLike, values_to_mask) -> np.ndarray: # known to be holdable by arr. # When called from Series._single_replace, values_to_mask is tuple or list dtype, values_to_mask = infer_dtype_from_array(values_to_mask) - values_to_mask = np.array(values_to_mask, dtype=dtype) + # pandas/core/missing.py:44: error: Argument "dtype" to "array" has incompatible + # type "Union[dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], None, type, + # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + values_to_mask = np.array(values_to_mask, dtype=dtype) # type: ignore[arg-type] na_mask = isna(values_to_mask) nonna = values_to_mask[~na_mask] @@ -271,7 +275,12 @@ def interpolate_1d( if method in NP_METHODS: # np.interp requires sorted X values, #21037 - indexer = np.argsort(inds[valid]) + + # pandas/core/missing.py:274: error: Argument 1 to "argsort" has incompatible + # type "Union[ExtensionArray, Any]"; expected "Union[Union[int, float, complex, + # str, bytes, generic], Sequence[Union[int, float, complex, str, bytes, + # generic]], Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + indexer = np.argsort(inds[valid]) # type: ignore[arg-type] result[invalid] = np.interp( inds[invalid], inds[valid][indexer], yvalues[valid][indexer] ) diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 7c5e0c6bdf1d5..d44bdd0ebaa83 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -285,9 +285,7 @@ def _get_values( # with scalar fill_value. 
This guarantee is important for the # np.where call below assert is_scalar(fill_value) - # error: Incompatible types in assignment (expression has type - # "ExtensionArray", variable has type "ndarray") - values = extract_array(values, extract_numpy=True) # type: ignore[assignment] + values = extract_array(values, extract_numpy=True) mask = _maybe_get_mask(values, skipna, mask) @@ -1163,9 +1161,7 @@ def nanskew( >>> nanops.nanskew(s) 1.7320508075688787 """ - # error: Incompatible types in assignment (expression has type - # "ExtensionArray", variable has type "ndarray") - values = extract_array(values, extract_numpy=True) # type: ignore[assignment] + values = extract_array(values, extract_numpy=True) mask = _maybe_get_mask(values, skipna, mask) if not is_float_dtype(values.dtype): values = values.astype("f8") @@ -1250,9 +1246,7 @@ def nankurt( >>> nanops.nankurt(s) -1.2892561983471076 """ - # error: Incompatible types in assignment (expression has type - # "ExtensionArray", variable has type "ndarray") - values = extract_array(values, extract_numpy=True) # type: ignore[assignment] + values = extract_array(values, extract_numpy=True) mask = _maybe_get_mask(values, skipna, mask) if not is_float_dtype(values.dtype): values = values.astype("f8") diff --git a/pandas/core/ops/array_ops.py b/pandas/core/ops/array_ops.py index 2c4fa9a728d05..9c98e614b123b 100644 --- a/pandas/core/ops/array_ops.py +++ b/pandas/core/ops/array_ops.py @@ -355,10 +355,7 @@ def fill_bool(x, left=None): # For int vs int `^`, `|`, `&` are bitwise operators and return # integer dtypes. Otherwise these are boolean ops filler = fill_int if is_self_int_dtype and is_other_int_dtype else fill_bool - - # error: Argument 1 to "na_logical_op" has incompatible type - # "ExtensionArray"; expected "ndarray" - res_values = na_logical_op(lvalues, rvalues, op) # type: ignore[arg-type] + res_values = na_logical_op(lvalues, rvalues, op) # error: Cannot call function of unknown type res_values = filler(res_values) # type: ignore[operator] diff --git a/pandas/core/reshape/concat.py b/pandas/core/reshape/concat.py index 4a2629daf63d7..c1deb97fae908 100644 --- a/pandas/core/reshape/concat.py +++ b/pandas/core/reshape/concat.py @@ -518,7 +518,10 @@ def get_result(self): # in new labels to make them unique, otherwise we would # duplicate or duplicates again if not obj_labels.is_unique: - new_labels = algos.make_duplicates_of_left_unique_in_right( + # pandas/core/reshape/concat.py:521: error: Incompatible + # types in assignment (expression has type "ndarray", + # variable has type "Index") [assignment] + new_labels = algos.make_duplicates_of_left_unique_in_right( # type: ignore[assignment] # noqa np.asarray(obj_labels), np.asarray(new_labels) ) indexers[ax] = obj_labels.reindex(new_labels)[1] diff --git a/pandas/core/reshape/merge.py b/pandas/core/reshape/merge.py index afe18e8ee40ff..b279bb13eeafd 100644 --- a/pandas/core/reshape/merge.py +++ b/pandas/core/reshape/merge.py @@ -2010,13 +2010,8 @@ def _factorize_keys( (array([0, 1, 2]), array([0, 1]), 3) """ # Some pre-processing for non-ndarray lk / rk - - # error: Incompatible types in assignment (expression has type - # "ExtensionArray", variable has type "ndarray") - lk = extract_array(lk, extract_numpy=True) # type: ignore[assignment] - # error: Incompatible types in assignment (expression has type - # "ExtensionArray", variable has type "ndarray") - rk = extract_array(rk, extract_numpy=True) # type: ignore[assignment] + lk = extract_array(lk, extract_numpy=True) + rk = extract_array(rk, 
extract_numpy=True) if is_datetime64tz_dtype(lk.dtype) and is_datetime64tz_dtype(rk.dtype): # Extract the ndarray (UTC-localized) values @@ -2032,25 +2027,33 @@ def _factorize_keys( assert isinstance(lk, Categorical) assert isinstance(rk, Categorical) # Cast rk to encoding so we can compare codes with lk - rk = lk._encode_with_my_categories(rk) - lk = ensure_int64(lk.codes) - rk = ensure_int64(rk.codes) + # pandas/core/reshape/merge.py:2035: error: has no attribute + # "_encode_with_my_categories" [attr-defined] + rk = lk._encode_with_my_categories(rk) # type: ignore[attr-defined] + + # pandas/core/reshape/merge.py:2037: error: has no attribute "codes" + # [attr-defined] + lk = ensure_int64(lk.codes) # type: ignore[attr-defined] + # pandas/core/reshape/merge.py:2038: error: "ndarray" has no attribute "codes" + # [attr-defined] + rk = ensure_int64(rk.codes) # type: ignore[attr-defined] elif is_extension_array_dtype(lk.dtype) and is_dtype_equal(lk.dtype, rk.dtype): # pandas\core\reshape\merge.py:1967: error: Incompatible types in # assignment (expression has type "ndarray", variable has type # "ExtensionArray") [assignment] - # pandas\core\reshape\merge.py:1967: error: "ndarray" has no attribute - # "_values_for_factorize" [attr-defined] - lk, _ = lk._values_for_factorize() # type: ignore[attr-defined,assignment] + # pandas/core/reshape/merge.py:2047: error: Item "ndarray" of "Union[Any, + # ndarray]" has no attribute "_values_for_factorize" [union-attr] + lk, _ = lk._values_for_factorize() # type: ignore[union-attr,assignment] # error: Incompatible types in assignment (expression has type # "ndarray", variable has type "ExtensionArray") - # error: "ndarray" has no attribute "_values_for_factorize" - rk, _ = rk._values_for_factorize() # type: ignore[attr-defined,assignment] + # pandas/core/reshape/merge.py:2053: error: Item "ndarray" of "Union[Any, + # ndarray]" has no attribute "_values_for_factorize" [union-attr] + rk, _ = rk._values_for_factorize() # type: ignore[union-attr,assignment] if is_integer_dtype(lk.dtype) and is_integer_dtype(rk.dtype): # GH#23917 TODO: needs tests for case where lk is integer-dtype diff --git a/pandas/io/formats/format.py b/pandas/io/formats/format.py index 0c4d53455c1e6..4bd8698682d08 100644 --- a/pandas/io/formats/format.py +++ b/pandas/io/formats/format.py @@ -1544,8 +1544,10 @@ def _format_strings(self) -> List[str]: if is_categorical_dtype(values.dtype): # Categorical is special for now, so that we can preserve tzinfo - # error: "ExtensionArray" has no attribute "_internal_get_values" - array = values._internal_get_values() # type: ignore[attr-defined] + # pandas/io/formats/format.py:1546: error: Item "ExtensionArray" of + # "Union[Any, ExtensionArray]" has no attribute "_internal_get_values" + # [union-attr] + array = values._internal_get_values() # type: ignore[union-attr] else: array = np.asarray(values) diff --git a/pandas/io/pytables.py b/pandas/io/pytables.py index 8f774e3af3636..36027a7b022a7 100644 --- a/pandas/io/pytables.py +++ b/pandas/io/pytables.py @@ -2980,11 +2980,7 @@ def write_array(self, key: str, obj: FrameOrSeries, items: Optional[Index] = Non # TODO: we only have a few tests that get here, the only EA # that gets passed is DatetimeArray, and we never have # both self._filters and EA - - # pandas\io\pytables.py:2980: error: Value of type variable - # "AnyArrayLike" of "extract_array" cannot be "FrameOrSeries" - # [type-var] - value = extract_array(obj, extract_numpy=True) # type: ignore[type-var] + value = extract_array(obj, 
extract_numpy=True) if key in self.group: self._handle.remove_node(self.group, key) @@ -3046,18 +3042,16 @@ def write_array(self, key: str, obj: FrameOrSeries, items: Optional[Index] = Non # store as UTC # with a zone - # error: "ndarray" has no attribute "asi8" + # pandas/io/pytables.py:3045: error: Item "ExtensionArray" of "Union[Any, + # ExtensionArray]" has no attribute "asi8" [union-attr] self._handle.create_array( - self.group, key, value.asi8 # type: ignore[attr-defined] + self.group, key, value.asi8 # type: ignore[union-attr] ) node = getattr(self.group, key) - # pandas\io\pytables.py:3061: error: "ExtensionArray" has no - # attribute "tz" [attr-defined] - - # pandas\io\pytables.py:3061: error: "ndarray" has no attribute - # "tz" [attr-defined] - node._v_attrs.tz = _get_tz(value.tz) # type: ignore[attr-defined] + # pandas/io/pytables.py:3048: error: Item "ExtensionArray" of "Union[Any, + # ExtensionArray]" has no attribute "tz" [union-attr] + node._v_attrs.tz = _get_tz(value.tz) # type: ignore[union-attr] node._v_attrs.value_type = "datetime64" elif is_timedelta64_dtype(value.dtype): self._handle.create_array(self.group, key, value.view("i8")) diff --git a/pandas/plotting/_matplotlib/core.py b/pandas/plotting/_matplotlib/core.py index 1a22e5629ebe8..b10e5d1de847b 100644 --- a/pandas/plotting/_matplotlib/core.py +++ b/pandas/plotting/_matplotlib/core.py @@ -1431,10 +1431,18 @@ def _make_plot(self): if self.orientation == "vertical": ax.xaxis.update_units(self.ax_index) - self.tick_pos = ax.convert_xunits(self.ax_index).astype(np.int) + # pandas/plotting/_matplotlib/core.py:1434: error: Module has no + # attribute "int"; maybe "uint", "rint", or "intp"? [attr-defined] + self.tick_pos = ax.convert_xunits(self.ax_index).astype( + np.int # type: ignore[attr-defined] + ) elif self.orientation == "horizontal": ax.yaxis.update_units(self.ax_index) - self.tick_pos = ax.convert_yunits(self.ax_index).astype(np.int) + # pandas/plotting/_matplotlib/core.py:1437: error: Module has no + # attribute "int"; maybe "uint", "rint", or "intp"? 
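The `np.int` errors above come from NumPy 1.20 deprecating the `np.int` alias (it was only ever the builtin `int`), so the new stubs omit it and mypy reports `[attr-defined]`. A small illustration of the equivalent spellings, not part of the patch:

import numpy as np

positions = np.asarray([0.5, 1.5, 2.5])

# The builtin works wherever the deprecated alias did ...
ticks = positions.astype(int)
# ... and np.int_ is the concrete NumPy scalar type (the platform default
# integer, C long) if a NumPy type is wanted explicitly.
ticks_np = positions.astype(np.int_)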
[attr-defined] + self.tick_pos = ax.convert_yunits(self.ax_index).astype( + np.int # type: ignore[attr-defined] + ) self.ax_pos = self.tick_pos - self.tickoffset kwds = self.kwds.copy() diff --git a/setup.cfg b/setup.cfg index 460e1d62dbfce..bfb02fe4582b5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -137,15 +137,15 @@ ignore_errors=True [mypy-pandas.tests.dtypes.test_common] ignore_errors=True -[mypy-pandas.tests.frame.indexing.test_setitem] -ignore_errors=True - [mypy-pandas.tests.frame.methods.test_to_records] ignore_errors=True [mypy-pandas.tests.frame.test_constructors] ignore_errors=True +[mypy-pandas.tests.groupby.test_rank] +ignore_errors=True + [mypy-pandas.tests.groupby.transform.test_transform] ignore_errors=True @@ -163,6 +163,3 @@ ignore_errors=True [mypy-pandas.tests.series.apply.test_series_apply] ignore_errors=True - -[mypy-pandas.tests.window.test_dtypes] -ignore_errors=True From 494b9f2e275b0f96e76869043c38e35cf0af4701 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Fri, 11 Dec 2020 15:40:57 +0000 Subject: [PATCH 40/86] tidy diff --- pandas/core/algorithms.py | 1 + pandas/core/internals/blocks.py | 1 - pandas/core/ops/array_ops.py | 1 + pandas/io/pytables.py | 1 + pandas/io/stata.py | 3 +-- pandas/tests/arrays/sparse/test_combine_concat.py | 8 +------- setup.cfg | 3 +++ 7 files changed, 8 insertions(+), 10 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index 23baeb91fe83e..8d39d2d667d3b 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -1329,6 +1329,7 @@ def compute(self, method: str) -> Series: arr -= 1 elif is_bool_dtype(pandas_dtype): + # GH 26154: ensure False is smaller than True arr = 1 - (-arr) if self.keep == "last": diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 57e401f279e7c..56799ac6215c9 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -1560,7 +1560,6 @@ def where( for m in [mask, ~mask]: if m.any(): result = cast(np.ndarray, result) # EABlock overrides where - taken = result.take(m.nonzero()[0], axis=axis) r = maybe_downcast_numeric(taken, self.dtype) nb = self.make_block(r.T, placement=self.mgr_locs[m]) diff --git a/pandas/core/ops/array_ops.py b/pandas/core/ops/array_ops.py index 9c98e614b123b..6fd9cb4504cac 100644 --- a/pandas/core/ops/array_ops.py +++ b/pandas/core/ops/array_ops.py @@ -355,6 +355,7 @@ def fill_bool(x, left=None): # For int vs int `^`, `|`, `&` are bitwise operators and return # integer dtypes. 
Otherwise these are boolean ops filler = fill_int if is_self_int_dtype and is_other_int_dtype else fill_bool + res_values = na_logical_op(lvalues, rvalues, op) # error: Cannot call function of unknown type res_values = filler(res_values) # type: ignore[operator] diff --git a/pandas/io/pytables.py b/pandas/io/pytables.py index 36027a7b022a7..3b5fae9d21869 100644 --- a/pandas/io/pytables.py +++ b/pandas/io/pytables.py @@ -2980,6 +2980,7 @@ def write_array(self, key: str, obj: FrameOrSeries, items: Optional[Index] = Non # TODO: we only have a few tests that get here, the only EA # that gets passed is DatetimeArray, and we never have # both self._filters and EA + value = extract_array(obj, extract_numpy=True) if key in self.group: diff --git a/pandas/io/stata.py b/pandas/io/stata.py index e22852cacd05b..3ddea2b43710d 100644 --- a/pandas/io/stata.py +++ b/pandas/io/stata.py @@ -559,8 +559,7 @@ def _cast_to_stata_types(data: DataFrame) -> DataFrame: dtype = c_data[1] else: dtype = c_data[2] - if c_data[2] == np.int64: - # Warn if necessary + if c_data[2] == np.int64: # Warn if necessary if data[col].max() >= 2 ** 53: ws = precision_loss_doc.format("uint64", "float64") diff --git a/pandas/tests/arrays/sparse/test_combine_concat.py b/pandas/tests/arrays/sparse/test_combine_concat.py index 899f7d434b9ed..0f09af269148b 100644 --- a/pandas/tests/arrays/sparse/test_combine_concat.py +++ b/pandas/tests/arrays/sparse/test_combine_concat.py @@ -46,13 +46,7 @@ def test_uses_first_kind(self, kind): (pd.Series([3, 4, 5], dtype="category"), np.dtype("int64")), (pd.Series([1.5, 2.5, 3.5], dtype="category"), np.dtype("float64")), # categorical with incompatible categories -> object dtype - ( - pd.Series(["a", "b", "c"], dtype="category"), - # pandas\tests\arrays\sparse\test_combine_concat.py:49: error: Value - # of type variable "_DTypeScalar" of "dtype" cannot be "object" - # [type-var] - np.dtype(object), # type: ignore[type-var] - ), + (pd.Series(["a", "b", "c"], dtype="category"), np.dtype(object)), ], ) def test_concat_with_non_sparse(other, expected_dtype): diff --git a/setup.cfg b/setup.cfg index bfb02fe4582b5..f230d13711c80 100644 --- a/setup.cfg +++ b/setup.cfg @@ -134,6 +134,9 @@ check_untyped_defs=False [mypy-pandas.tests.arithmetic.conftest] ignore_errors=True +[mypy-pandas.tests.arrays.sparse.test_combine_concat] +ignore_errors=True + [mypy-pandas.tests.dtypes.test_common] ignore_errors=True From e28c09fb76caa0b9e9c85fc6a958a02ac5013fda Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Wed, 6 Jan 2021 18:10:39 +0000 Subject: [PATCH 41/86] update ci to numpy 1.20.0rc2 --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fb559242bf9aa..2f56412d3c9bb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -168,7 +168,7 @@ jobs: - name: Update numpy to release candidate run: | source activate pandas-dev - pip install numpy==1.20.0rc1 + pip install numpy==1.20.0rc2 conda list - name: Remove pandas run: | From 1361e7f14a869cd59938deb21c47b46b7749f5bb Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Wed, 6 Jan 2021 21:04:34 +0000 Subject: [PATCH 42/86] wip --- pandas/_testing/__init__.py | 18 +++++- pandas/core/arrays/boolean.py | 8 ++- pandas/core/arrays/categorical.py | 13 ++++- pandas/core/arrays/floating.py | 4 +- pandas/core/arrays/integer.py | 4 +- pandas/core/arrays/masked.py | 29 ++++++++-- pandas/core/arrays/sparse/array.py | 82 ++++++++++++++++++++++++--- 
pandas/core/arrays/string_arrow.py | 17 +++++- pandas/core/frame.py | 79 ++++++++++++++++++++++---- pandas/core/indexes/datetimelike.py | 4 +- pandas/core/indexes/numeric.py | 6 +- pandas/core/indexes/period.py | 4 +- pandas/core/indexes/range.py | 14 ++++- pandas/core/internals/blocks.py | 76 +++++++++++++++++++------ pandas/core/internals/concat.py | 29 ++++++++-- pandas/core/internals/construction.py | 55 +++++++++++++++--- pandas/core/window/ewm.py | 7 ++- pandas/core/window/rolling.py | 8 ++- pandas/io/json/_json.py | 21 ++++++- pandas/io/parsers.py | 16 +++++- pandas/io/pytables.py | 7 ++- pandas/io/sql.py | 16 +++++- 22 files changed, 435 insertions(+), 82 deletions(-) diff --git a/pandas/_testing/__init__.py b/pandas/_testing/__init__.py index c51ceb750c338..bcc831d11b5d2 100644 --- a/pandas/_testing/__init__.py +++ b/pandas/_testing/__init__.py @@ -119,9 +119,21 @@ + STRING_DTYPES + DATETIME64_DTYPES + TIMEDELTA64_DTYPES - + BOOL_DTYPES - + OBJECT_DTYPES - + BYTES_DTYPES + # pandas/_testing/__init__.py:122: error: Unsupported operand types for + + # ("List[Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object]]]" and "List[object]") + # [operator] + + BOOL_DTYPES # type: ignore[operator] + # pandas/_testing/__init__.py:123: error: Unsupported operand types for + + # ("List[Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object]]]" and "List[object]") + # [operator] + + OBJECT_DTYPES # type: ignore[operator] + # pandas/_testing/__init__.py:124: error: Unsupported operand types for + + # ("List[Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object]]]" and "List[object]") + # [operator] + + BYTES_DTYPES # type: ignore[operator] ) NULL_OBJECTS = [None, np.nan, pd.NaT, float("nan"), pd.NA] diff --git a/pandas/core/arrays/boolean.py b/pandas/core/arrays/boolean.py index 8f545b0f1cc7f..f63542e6f0f18 100644 --- a/pandas/core/arrays/boolean.py +++ b/pandas/core/arrays/boolean.py @@ -374,14 +374,18 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: dtype = pandas_dtype(dtype) if isinstance(dtype, ExtensionDtype): - return super().astype(dtype, copy) + # pandas/core/arrays/boolean.py:377: error: Incompatible return value type + # (got "ExtensionArray", expected "ndarray") [return-value] + return super().astype(dtype, copy) # type: ignore[return-value] if is_bool_dtype(dtype): # astype_nansafe converts np.nan to True if self._hasna: raise ValueError("cannot convert float NaN to bool") else: - return self._data.astype(dtype, copy=copy) + # pandas/core/arrays/boolean.py:384: error: Incompatible return value + # type (got "ndarray", expected "ExtensionArray") [return-value] + return self._data.astype(dtype, copy=copy) # type: ignore[return-value] # for integer, error if there are missing values if is_integer_dtype(dtype) and self._hasna: diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py index 0d041f9d2151e..43b58d170e8e4 100644 --- a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -369,7 +369,10 @@ def __init__( values = sanitize_array(values, None, dtype=sanitize_dtype) else: - values = sanitize_to_nanoseconds(values) + # pandas/core/arrays/categorical.py:372: error: Argument 1 to + # "sanitize_to_nanoseconds" has incompatible type "Union[ndarray, + # ExtensionArray]"; expected "ndarray" [arg-type] + values = 
sanitize_to_nanoseconds(values) # type: ignore[arg-type] if dtype.categories is None: try: @@ -395,7 +398,9 @@ def __init__( dtype = CategoricalDtype(categories, dtype.ordered) elif is_categorical_dtype(values.dtype): - old_codes = extract_array(values)._codes + # pandas/core/arrays/categorical.py:398: error: Item "ExtensionArray" of + # "Union[Any, ExtensionArray]" has no attribute "_codes" [union-attr] + old_codes = extract_array(values)._codes # type: ignore[union-attr] codes = recode_for_categories( old_codes, values.dtype.categories, dtype.categories, copy=copy ) @@ -2576,7 +2581,9 @@ def _get_codes_for_values(values, categories: "Index") -> np.ndarray: # Only hit here when we've already coerced to object dtypee. hash_klass, vals = get_data_algo(values) - _, cats = get_data_algo(categories) + # pandas/core/arrays/categorical.py:2579: error: Value of type variable "ArrayLike" + # of "get_data_algo" cannot be "Index" [type-var] + _, cats = get_data_algo(categories) # type: ignore[type-var] t = hash_klass(len(cats)) t.map_locations(cats) return coerce_indexer_dtype(t.lookup(vals), cats) diff --git a/pandas/core/arrays/floating.py b/pandas/core/arrays/floating.py index fe33e4fe94824..908b76065ec91 100644 --- a/pandas/core/arrays/floating.py +++ b/pandas/core/arrays/floating.py @@ -334,7 +334,9 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: dtype = pandas_dtype(dtype) if isinstance(dtype, ExtensionDtype): - return super().astype(dtype, copy=copy) + # pandas/core/arrays/floating.py:337: error: Incompatible return value type + # (got "ExtensionArray", expected "ndarray") [return-value] + return super().astype(dtype, copy=copy) # type: ignore[return-value] # coerce if is_float_dtype(dtype): diff --git a/pandas/core/arrays/integer.py b/pandas/core/arrays/integer.py index 76ab4da543195..1ae3b71d698bb 100644 --- a/pandas/core/arrays/integer.py +++ b/pandas/core/arrays/integer.py @@ -403,7 +403,9 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: dtype = pandas_dtype(dtype) if isinstance(dtype, ExtensionDtype): - return super().astype(dtype, copy=copy) + # pandas/core/arrays/integer.py:406: error: Incompatible return value type + # (got "ExtensionArray", expected "ndarray") [return-value] + return super().astype(dtype, copy=copy) # type: ignore[return-value] # coerce if is_float_dtype(dtype): diff --git a/pandas/core/arrays/masked.py b/pandas/core/arrays/masked.py index fd42e990349d6..f4f2b99ff5290 100644 --- a/pandas/core/arrays/masked.py +++ b/pandas/core/arrays/masked.py @@ -146,7 +146,17 @@ def __len__(self) -> int: def __invert__(self: BaseMaskedArrayT) -> BaseMaskedArrayT: return type(self)(~self._data, self._mask) - def to_numpy( + # pandas/core/arrays/masked.py:149: error: Argument 1 of "to_numpy" is incompatible + # with supertype "ExtensionArray"; supertype defines the argument type as + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]" [override] + + # pandas/core/arrays/masked.py:149: note: This violates the Liskov substitution + # principle + + # pandas/core/arrays/masked.py:149: note: See + # https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides + def to_numpy( # type: ignore[override] self, dtype: Optional[NpDtype] = None, copy: bool = False, @@ -215,7 +225,10 @@ def to_numpy( if na_value is lib.no_default: na_value = libmissing.NA if dtype is None: - dtype = object + # pandas/core/arrays/masked.py:218: error: Incompatible types in assignment + # 
(expression has type "Type[object]", variable has type "Union[str, + # dtype[Any], None]") [assignment] + dtype = object # type: ignore[assignment] if self._hasna: if ( not is_object_dtype(dtype) @@ -239,8 +252,12 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: if is_dtype_equal(dtype, self.dtype): if copy: - return self.copy() - return self + # pandas/core/arrays/masked.py:242: error: Incompatible return value + # type (got "BaseMaskedArray", expected "ndarray") [return-value] + return self.copy() # type: ignore[return-value] + # pandas/core/arrays/masked.py:243: error: Incompatible return value type + # (got "BaseMaskedArray", expected "ndarray") [return-value] + return self # type: ignore[return-value] # if we are astyping to another nullable masked dtype, we can fastpath if isinstance(dtype, BaseMaskedDtype): @@ -250,7 +267,9 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: # not directly depending on the `copy` keyword mask = self._mask if data is self._data else self._mask.copy() cls = dtype.construct_array_type() - return cls(data, mask, copy=False) + # pandas/core/arrays/masked.py:253: error: Incompatible return value type + # (got "BaseMaskedArray", expected "ndarray") [return-value] + return cls(data, mask, copy=False) # type: ignore[return-value] if isinstance(dtype, ExtensionDtype): eacls = dtype.construct_array_type() diff --git a/pandas/core/arrays/sparse/array.py b/pandas/core/arrays/sparse/array.py index d3f01b8348ff7..6abe7a40e4775 100644 --- a/pandas/core/arrays/sparse/array.py +++ b/pandas/core/arrays/sparse/array.py @@ -337,7 +337,13 @@ def __init__( # dtype inference if data is None: # TODO: What should the empty dtype be? Object or float? - data = np.array([], dtype=dtype) + + # pandas/core/arrays/sparse/array.py:340: error: Argument "dtype" to "array" + # has incompatible type "Union[ExtensionDtype, dtype[Any], None]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" [arg-type] + data = np.array([], dtype=dtype) # type: ignore[arg-type] if not is_array_like(data): try: @@ -366,7 +372,14 @@ def __init__( if isinstance(data, type(self)) and sparse_index is None: sparse_index = data._sparse_index - sparse_values = np.asarray(data.sp_values, dtype=dtype) + # pandas/core/arrays/sparse/array.py:369: error: Argument "dtype" to + # "asarray" has incompatible type "Union[ExtensionDtype, dtype[Any], + # Type[object], None]"; expected "Union[dtype[Any], None, type, + # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + sparse_values = np.asarray( + data.sp_values, dtype=dtype # type: ignore[arg-type] + ) elif sparse_index is None: data = extract_array(data, extract_numpy=True) if not isinstance(data, np.ndarray): @@ -382,10 +395,22 @@ def __init__( data = np.asarray(data, dtype="datetime64[ns]") data = np.asarray(data) sparse_values, sparse_index, fill_value = make_sparse( - data, kind=kind, fill_value=fill_value, dtype=dtype + # pandas/core/arrays/sparse/array.py:385: error: Argument "dtype" to + # "make_sparse" has incompatible type "Union[ExtensionDtype, dtype[Any], + # Type[object], None]"; expected "Union[str, dtype[Any], None]" + # [arg-type] + data, + kind=kind, + fill_value=fill_value, + dtype=dtype, # type: ignore[arg-type] ) else: - sparse_values = np.asarray(data, dtype=dtype) + # 
pandas/core/arrays/sparse/array.py:388: error: Argument "dtype" to + # "asarray" has incompatible type "Union[ExtensionDtype, dtype[Any], + # Type[object], None]"; expected "Union[dtype[Any], None, type, + # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + sparse_values = np.asarray(data, dtype=dtype) # type: ignore[arg-type] if len(sparse_values) != sparse_index.npoints: raise AssertionError( f"Non array-like type {type(sparse_values)} must " @@ -473,7 +498,10 @@ def __array__(self, dtype: Optional[NpDtype] = None) -> np.ndarray: try: dtype = np.result_type(self.sp_values.dtype, type(fill_value)) except TypeError: - dtype = object + # pandas/core/arrays/sparse/array.py:476: error: Incompatible types in + # assignment (expression has type "Type[object]", variable has type + # "Union[str, dtype[Any], None]") [assignment] + dtype = object # type: ignore[assignment] out = np.full(self.shape, fill_value, dtype=dtype) out[self.sp_index.to_int_index().indices] = self.sp_values @@ -1066,7 +1094,36 @@ def astype(self, dtype: Optional[Dtype] = None, copy=True): else: return self.copy() dtype = self.dtype.update_dtype(dtype) - subtype = pandas_dtype(dtype._subtype_with_str) + # pandas/core/arrays/sparse/array.py:1069: error: Item "ExtensionDtype" of + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]" has no attribute + # "_subtype_with_str" [union-attr] + + # pandas/core/arrays/sparse/array.py:1069: error: Item "str" of + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]" has no attribute + # "_subtype_with_str" [union-attr] + + # pandas/core/arrays/sparse/array.py:1069: error: Item "dtype[Any]" of + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]" has no attribute + # "_subtype_with_str" [union-attr] + + # pandas/core/arrays/sparse/array.py:1069: error: Item "ABCMeta" of + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]" has no attribute + # "_subtype_with_str" [union-attr] + + # pandas/core/arrays/sparse/array.py:1069: error: Item "type" of + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]" has no attribute + # "_subtype_with_str" [union-attr] + + # pandas/core/arrays/sparse/array.py:1069: error: Item "None" of + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]" has no attribute + # "_subtype_with_str" [union-attr] + subtype = pandas_dtype(dtype._subtype_with_str) # type: ignore[union-attr] # TODO copy=False is broken for astype_nansafe with int -> float, so cannot # passthrough copy keyword: https://github.com/pandas-dev/pandas/issues/34456 sp_values = astype_nansafe(self.sp_values, subtype, copy=True) @@ -1376,7 +1433,11 @@ def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): return type(self)(result) def __abs__(self): - return np.abs(self) + # pandas/core/arrays/sparse/array.py:1379: error: Argument 1 to "__call__" of + # "ufunc" has incompatible type "SparseArray"; expected "Union[Union[int, float, + # complex, str, bytes, generic], Sequence[Union[int, float, complex, str, bytes, + # generic]], Sequence[Sequence[Any]], 
_SupportsArray]" [arg-type] + return np.abs(self) # type: ignore[arg-type] # ------------------------------------------------------------------------ # Ops @@ -1525,7 +1586,12 @@ def make_sparse( index = make_sparse_index(length, indices, kind) sparsified_values = arr[mask] if dtype is not None: - sparsified_values = astype_nansafe(sparsified_values, dtype=dtype) + # pandas/core/arrays/sparse/array.py:1528: error: Argument "dtype" to + # "astype_nansafe" has incompatible type "Union[str, dtype[Any]]"; expected + # "Union[dtype[Any], ExtensionDtype]" [arg-type] + sparsified_values = astype_nansafe( + sparsified_values, dtype=dtype # type: ignore[arg-type] + ) # TODO: copy return sparsified_values, index, fill_value diff --git a/pandas/core/arrays/string_arrow.py b/pandas/core/arrays/string_arrow.py index ef170dbfd571e..97e2cdb9705c4 100644 --- a/pandas/core/arrays/string_arrow.py +++ b/pandas/core/arrays/string_arrow.py @@ -230,7 +230,12 @@ def __arrow_array__(self, type=None): """Convert myself to a pyarrow Array or ChunkedArray.""" return self._data - def to_numpy( + # pandas/core/arrays/string_arrow.py:233: error: Argument 1 of "to_numpy" is + # incompatible with supertype "ExtensionArray"; supertype defines the + # argument type as "Union[ExtensionDtype, str, dtype[Any], Type[str], + # Type[float], Type[int], Type[complex], Type[bool], Type[object], None]" + # [override] + def to_numpy( # type: ignore[override] self, dtype: Optional[NpDtype] = None, copy: bool = False, @@ -379,7 +384,15 @@ def fillna(self, value=None, method=None, limit=None): if mask.any(): if method is not None: func = get_fill_func(method) - new_values = func(self.to_numpy(object), limit=limit, mask=mask) + # pandas/core/arrays/string_arrow.py:382: error: Argument 1 to + # "to_numpy" of "ArrowStringArray" has incompatible type + # "Type[object]"; expected "Union[str, dtype[Any], None]" + # [arg-type] + new_values = func( + self.to_numpy(object), # type: ignore[arg-type] + limit=limit, + mask=mask, + ) new_values = self._from_sequence(new_values) else: # fill with value diff --git a/pandas/core/frame.py b/pandas/core/frame.py index acb7f6547a8f3..675e87298b5af 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -555,7 +555,17 @@ def __init__( # a masked array else: data = sanitize_masked_array(data) - mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) + # pandas/core/frame.py:558: error: Argument "dtype" to "init_ndarray" + # has incompatible type "Union[ExtensionDtype, str, dtype[Any], + # Type[object], None]"; expected "Union[dtype[Any], ExtensionDtype, + # None]" [arg-type] + mgr = init_ndarray( + data, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + copy=copy, + ) elif isinstance(data, (np.ndarray, Series, Index)): if data.dtype.names: @@ -605,9 +615,34 @@ def __init__( data = dataclasses_to_dicts(data) if treat_as_nested(data): arrays, columns, index = nested_data_to_arrays( - data, columns, index, dtype + # pandas/core/frame.py:608: error: Argument 2 to + # "nested_data_to_arrays" has incompatible type + # "Optional[Collection[Any]]"; expected "Optional[Index]" + # [arg-type] + # pandas/core/frame.py:608: error: Argument 3 to + # "nested_data_to_arrays" has incompatible type + # "Optional[Collection[Any]]"; expected "Optional[Index]" + # [arg-type] + # pandas/core/frame.py:608: error: Argument 4 to + # "nested_data_to_arrays" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; + # expected "Union[dtype[Any], ExtensionDtype, None]" 
[arg-type] + data, + columns, # type: ignore[arg-type] + index, # type: ignore[arg-type] + dtype, # type: ignore[arg-type] + ) + # pandas/core/frame.py:610: error: Argument "dtype" to + # "arrays_to_mgr" has incompatible type "Union[ExtensionDtype, str, + # dtype[Any], Type[object], None]"; expected "Union[dtype[Any], + # ExtensionDtype, None]" [arg-type] + mgr = arrays_to_mgr( + arrays, + columns, + index, + columns, + dtype=dtype, # type: ignore[arg-type] ) - mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype) else: # error: Argument "dtype" to "init_ndarray" has # incompatible type "Union[ExtensionDtype, str, dtype, @@ -650,8 +685,19 @@ def __init__( ] mgr = arrays_to_mgr(values, columns, index, columns, dtype=None) else: - values = construct_2d_arraylike_from_scalar( - data, len(index), len(columns), dtype, copy + # pandas/core/frame.py:653: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type + # "List[ExtensionArray]") [assignment] + values = construct_2d_arraylike_from_scalar( # type: ignore[assignment] + # pandas/core/frame.py:654: error: Argument 4 to + # "construct_2d_arraylike_from_scalar" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object]]"; expected + # "dtype[Any]" [arg-type] + data, + len(index), + len(columns), + dtype, # type: ignore[arg-type] + copy, ) mgr = init_ndarray( @@ -1203,17 +1249,19 @@ def __len__(self) -> int: """ return len(self.index) - # pandas/core/frame.py:1146: error: Overloaded function signatures 1 and 2 - # overlap with incompatible return types [misc] @overload - def dot(self, other: Series) -> Series: # type: ignore[misc] + def dot(self, other: Series) -> Series: ... @overload def dot(self, other: Union[DataFrame, Index, ArrayLike]) -> DataFrame: ... - def dot(self, other: Union[AnyArrayLike, FrameOrSeriesUnion]) -> FrameOrSeriesUnion: + # pandas/core/frame.py:1216: error: Overloaded function implementation cannot + # satisfy signature 2 due to inconsistencies in how they use type variables [misc] + def dot( # type: ignore[misc] + self, other: Union[AnyArrayLike, FrameOrSeriesUnion] + ) -> FrameOrSeriesUnion: """ Compute the matrix multiplication between the DataFrame and other. @@ -2056,7 +2104,9 @@ def to_records( # array of tuples to numpy cols. 
copy copy copy ix_vals = list(map(np.array, zip(*self.index._values))) else: - ix_vals = [self.index.values] + # pandas/core/frame.py:2059: error: List item 0 has incompatible type + # "ArrayLike"; expected "ndarray" [list-item] + ix_vals = [self.index.values] # type: ignore[list-item] arrays = ix_vals + [ np.asarray(self.iloc[:, i]) for i in range(len(self.columns)) @@ -3305,7 +3355,10 @@ def _set_item_frame_value(self, key, value: "DataFrame") -> None: value = value.reindex(cols, axis=1) # now align rows - value = _reindex_for_setitem(value, self.index) + + # pandas/core/frame.py:3308: error: Incompatible types in assignment (expression + # has type "ExtensionArray", variable has type "DataFrame") [assignment] + value = _reindex_for_setitem(value, self.index) # type: ignore[assignment] value = value.T self._set_item_mgr(key, value) @@ -9715,7 +9768,9 @@ def _reindex_for_setitem(value: FrameOrSeriesUnion, index: Index) -> ArrayLike: # reindex if necessary if value.index.equals(index) or not len(index): - return value._values.copy() + # pandas/core/frame.py:9718: error: Incompatible return value type (got + # "Union[ndarray, Any]", expected "ExtensionArray") [return-value] + return value._values.copy() # type: ignore[return-value] # GH#4107 try: diff --git a/pandas/core/indexes/datetimelike.py b/pandas/core/indexes/datetimelike.py index 7d214829b1871..08a5704d008f8 100644 --- a/pandas/core/indexes/datetimelike.py +++ b/pandas/core/indexes/datetimelike.py @@ -131,7 +131,9 @@ def _is_all_dates(self) -> bool: # Abstract data attributes @property - def values(self) -> np.ndarray: + # pandas/core/indexes/datetimelike.py:134: error: Return type "ndarray" of "values" + # incompatible with return type "ArrayLike" in supertype "Index" [override] + def values(self) -> np.ndarray: # type: ignore[override] # Note: PeriodArray overrides this to return an ndarray of objects. 
return self._data._data diff --git a/pandas/core/indexes/numeric.py b/pandas/core/indexes/numeric.py index 21579b3f83847..63a38255d15ae 100644 --- a/pandas/core/indexes/numeric.py +++ b/pandas/core/indexes/numeric.py @@ -354,7 +354,11 @@ def astype(self, dtype, copy=True): elif is_integer_dtype(dtype) and not is_extension_array_dtype(dtype): # TODO(jreback); this can change once we have an EA Index type # GH 13149 - arr = astype_nansafe(self._values, dtype=dtype) + + # pandas/core/indexes/numeric.py:357: error: Argument 1 to "astype_nansafe" + # has incompatible type "Union[ExtensionArray, ndarray]"; expected "ndarray" + # [arg-type] + arr = astype_nansafe(self._values, dtype=dtype) # type: ignore[arg-type] return Int64Index(arr, name=self.name) return super().astype(dtype, copy=copy) diff --git a/pandas/core/indexes/period.py b/pandas/core/indexes/period.py index 7762198246603..1ba8699ab312a 100644 --- a/pandas/core/indexes/period.py +++ b/pandas/core/indexes/period.py @@ -248,7 +248,9 @@ def __new__( # Data @property - def values(self) -> np.ndarray: + # pandas/core/indexes/period.py:251: error: Return type "ndarray" of "values" + # incompatible with return type "ArrayLike" in supertype "Index" [override] + def values(self) -> np.ndarray: # type: ignore[override] return np.asarray(self, dtype=object) def _maybe_convert_timedelta(self, other): diff --git a/pandas/core/indexes/range.py b/pandas/core/indexes/range.py index 40ef6b34f94c3..a20a194f51696 100644 --- a/pandas/core/indexes/range.py +++ b/pandas/core/indexes/range.py @@ -92,7 +92,12 @@ def __new__( name=None, ): - cls._validate_dtype(dtype) + # pandas/core/indexes/range.py:95: error: Argument 1 to "_validate_dtype" of + # "NumericIndex" has incompatible type "Union[ExtensionDtype, str, dtype[Any], + # Type[str], Type[float], Type[int], Type[complex], Type[bool], Type[object], + # None]"; expected "Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], + # Type[float], Type[int], Type[complex], Type[bool], Type[object]]" [arg-type] + cls._validate_dtype(dtype) # type: ignore[arg-type] name = maybe_extract_name(name, start, cls) # RangeIndex @@ -135,7 +140,12 @@ def from_range( f"range, {repr(data)} was passed" ) - cls._validate_dtype(dtype) + # pandas/core/indexes/range.py:138: error: Argument 1 to "_validate_dtype" of + # "NumericIndex" has incompatible type "Union[ExtensionDtype, str, dtype[Any], + # Type[str], Type[float], Type[int], Type[complex], Type[bool], Type[object], + # None]"; expected "Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], + # Type[float], Type[int], Type[complex], Type[bool], Type[object]]" [arg-type] + cls._validate_dtype(dtype) # type: ignore[arg-type] return cls._simple_new(data, name=name) @classmethod diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index e73aab9d19318..5f73b1efe7c54 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -632,7 +632,10 @@ def astype(self, dtype, copy: bool = False, errors: str = "raise"): # e.g. 
astype_nansafe can fail on object-dtype of strings # trying to convert to float if errors == "ignore": - new_values = self.values + # pandas/core/internals/blocks.py:635: error: Incompatible types in + # assignment (expression has type "Union[ndarray, ExtensionArray]", + # variable has type "ExtensionArray") [assignment] + new_values = self.values # type: ignore[assignment] else: raise @@ -650,12 +653,33 @@ def _astype(self, dtype: DtypeObj, copy: bool) -> ArrayLike: values = self.values if is_datetime64tz_dtype(dtype) and is_datetime64_dtype(values.dtype): - return astype_dt64_to_dt64tz(values, dtype, copy, via_utc=True) + # pandas/core/internals/blocks.py:653: error: Value of type variable + # "ArrayLike" of "astype_dt64_to_dt64tz" cannot be "Union[ndarray, + # ExtensionArray]" [type-var] + + # pandas/core/internals/blocks.py:653: error: Incompatible return value type + # (got "DatetimeArray", expected "ndarray") [return-value] + return astype_dt64_to_dt64tz( # type: ignore[type-var,return-value] + values, dtype, copy, via_utc=True + ) if is_dtype_equal(values.dtype, dtype): if copy: - return values.copy() - return values + # pandas/core/internals/blocks.py:657: error: Incompatible return value + # type (got "Union[ndarray, ExtensionArray]", expected "ExtensionArray") + # [return-value] + + # pandas/core/internals/blocks.py:657: error: Incompatible return value + # type (got "Union[ndarray, ExtensionArray]", expected "ndarray") + # [return-value] + return values.copy() # type: ignore[return-value] + # pandas/core/internals/blocks.py:658: error: Incompatible return value type + # (got "Union[ndarray, ExtensionArray]", expected "ExtensionArray") + # [return-value] + + # pandas/core/internals/blocks.py:658: error: Incompatible return value type + # (got "Union[ndarray, ExtensionArray]", expected "ndarray") [return-value] + return values # type: ignore[return-value] if isinstance(values, ExtensionArray): values = values.astype(dtype, copy=copy) @@ -663,7 +687,13 @@ def _astype(self, dtype: DtypeObj, copy: bool) -> ArrayLike: else: values = astype_nansafe(values, dtype, copy=copy) - return values + # pandas/core/internals/blocks.py:666: error: Incompatible return value type + # (got "Union[ndarray, ExtensionArray]", expected "ExtensionArray") + # [return-value] + + # pandas/core/internals/blocks.py:666: error: Incompatible return value type + # (got "Union[ndarray, ExtensionArray]", expected "ndarray") [return-value] + return values # type: ignore[return-value] def convert( self, @@ -849,7 +879,15 @@ def comp(s: Scalar, mask: np.ndarray, regex: bool = False) -> np.ndarray: if isna(s): return ~mask - return compare_or_regex_search(self.values, s, regex, mask) + # pandas/core/internals/blocks.py:852: error: Incompatible return value type + # (got "Union[ndarray, bool]", expected "ndarray") [return-value] + + # pandas/core/internals/blocks.py:852: error: Argument 1 to + # "compare_or_regex_search" has incompatible type "Union[ndarray, + # ExtensionArray]"; expected "ndarray" [arg-type] + return compare_or_regex_search( # type: ignore[return-value] + self.values, s, regex, mask # type: ignore[arg-type] + ) if self.is_object: # Calculate the mask once, prior to the call of comp @@ -1049,7 +1087,10 @@ def putmask(self, mask, new, axis: int = 0) -> List["Block"]: if transpose: new_values = new_values.T - putmask_without_repeat(new_values, mask, new) + # pandas/core/internals/blocks.py:1052: error: Argument 1 to + # "putmask_without_repeat" has incompatible type "Union[ndarray, + # ExtensionArray]"; 
expected "ndarray" [arg-type] + putmask_without_repeat(new_values, mask, new) # type: ignore[arg-type] # maybe upcast me elif mask.any(): @@ -1284,19 +1325,14 @@ def shift(self, periods: int, axis: int = 0, fill_value=None): # convert integer to float if necessary. need to do a lot more than # that, handle boolean etc also - # pandas\core\internals\blocks.py:1376: error: Value of type variable - # "ArrayLike" of "maybe_upcast" cannot be "Union[ndarray, - # ExtensionArray]" [type-var] + # pandas/core/internals/blocks.py:1286: error: Argument 1 to "maybe_upcast" has + # incompatible type "Union[ndarray, ExtensionArray]"; expected "ndarray" + # [arg-type] new_values, fill_value = maybe_upcast( - self.values, fill_value # type: ignore[type-var] + self.values, fill_value # type: ignore[arg-type] ) - # pandas\core\internals\blocks.py:1378: error: Argument 1 to "shift" - # has incompatible type "Union[ndarray, ExtensionArray]"; expected - # "ndarray" [arg-type] - new_values = shift( - new_values, periods, axis, fill_value # type: ignore[arg-type] - ) + new_values = shift(new_values, periods, axis, fill_value) return [self.make_block(new_values)] @@ -2612,7 +2648,11 @@ def safe_reshape(arr: ArrayLike, new_shape: Shape) -> ArrayLike: if not is_extension_array_dtype(arr.dtype): # Note: this will include TimedeltaArray and tz-naive DatetimeArray # TODO(EA2D): special case will be unnecessary with 2D EAs - arr = np.asarray(arr).reshape(new_shape) + + # pandas/core/internals/blocks.py:2615: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "ExtensionArray") + # [assignment] + arr = np.asarray(arr).reshape(new_shape) # type: ignore[assignment] return arr diff --git a/pandas/core/internals/concat.py b/pandas/core/internals/concat.py index b0c9bd71bb541..ef7ec005c936f 100644 --- a/pandas/core/internals/concat.py +++ b/pandas/core/internals/concat.py @@ -128,7 +128,9 @@ def _get_mgr_concatenation_plan(mgr: BlockManager, indexers: Dict[int, np.ndarra blk = mgr.blocks[0] return [(blk.mgr_locs, JoinUnit(blk, mgr_shape, indexers))] - ax0_indexer = None + # pandas/core/internals/concat.py:131: error: Incompatible types in assignment + # (expression has type "None", variable has type "ndarray") [assignment] + ax0_indexer = None # type: ignore[assignment] blknos = mgr.blknos blklocs = mgr.blklocs @@ -258,7 +260,11 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike: ): if self.block is None: # TODO(EA2D): special case unneeded with 2D EAs - return DatetimeArray( + + # pandas/core/internals/concat.py:261: error: Incompatible + # return value type (got "DatetimeArray", expected "ndarray") + # [return-value] + return DatetimeArray( # type: ignore[return-value] np.full(self.shape[1], fill_value.value), dtype=empty_dtype ) elif getattr(self.block, "is_categorical", False): @@ -356,7 +362,14 @@ def _concatenate_join_units( elif any(isinstance(t, ExtensionArray) for t in to_concat): # concatting with at least one EA means we are concatting a single column # the non-EA values are 2D arrays with shape (1, n) - to_concat = [t if isinstance(t, ExtensionArray) else t[0, :] for t in to_concat] + + # pandas/core/internals/concat.py:359: error: Invalid index type "Tuple[int, + # slice]" for "ExtensionArray"; expected type "Union[int, slice, ndarray]" + # [index] + to_concat = [ + t if isinstance(t, ExtensionArray) else t[0, :] # type: ignore[index] + for t in to_concat + ] concat_values = concat_compat(to_concat, axis=0) if not isinstance(concat_values, 
ExtensionArray) or ( isinstance(concat_values, DatetimeArray) and concat_values.tz is None @@ -365,11 +378,17 @@ def _concatenate_join_units( # 2D to put it a non-EA Block # special case DatetimeArray, which *is* an EA, but is put in a # consolidated 2D block - concat_values = np.atleast_2d(concat_values) + + # pandas/core/internals/concat.py:368: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type + # "ExtensionArray") [assignment] + concat_values = np.atleast_2d(concat_values) # type: ignore[assignment] else: concat_values = concat_compat(to_concat, axis=concat_axis) - return concat_values + # pandas/core/internals/concat.py:372: error: Incompatible return value type (got + # "ExtensionArray", expected "ndarray") [return-value] + return concat_values # type: ignore[return-value] def _get_empty_dtype_and_na(join_units: Sequence[JoinUnit]) -> Tuple[DtypeObj, Any]: diff --git a/pandas/core/internals/construction.py b/pandas/core/internals/construction.py index 27b75c284fa07..53ae71e59f34a 100644 --- a/pandas/core/internals/construction.py +++ b/pandas/core/internals/construction.py @@ -298,7 +298,9 @@ def nested_data_to_arrays( columns = data[0]._fields arrays, columns = to_arrays(data, columns, dtype=dtype) - columns = ensure_index(columns) + # pandas/core/internals/construction.py:301: error: Value of type variable + # "AnyArrayLike" of "ensure_index" cannot be "Optional[Index]" [type-var] + columns = ensure_index(columns) # type: ignore[type-var] if index is None: if isinstance(data[0], ABCSeries): @@ -581,7 +583,22 @@ def to_arrays(data, columns, dtype: Optional[DtypeObj] = None): data = [tuple(x) for x in data] content, columns = _list_to_arrays(data, columns) - content, columns = _finalize_columns_and_data(content, columns, dtype) + # pandas/core/internals/construction.py:584: error: Incompatible types in assignment + # (expression has type "List[ndarray]", variable has type "List[Union[Union[str, + # int, float, bool], Union[Any, Any, Any, Any]]]") [assignment] + + # pandas/core/internals/construction.py:584: note: "List" is invariant -- see + # http://mypy.readthedocs.io/en/latest/common_issues.html#variance + + # pandas/core/internals/construction.py:584: note: Consider using "Sequence" + # instead, which is covariant + + # pandas/core/internals/construction.py:584: error: Argument 1 to + # "_finalize_columns_and_data" has incompatible type "List[Union[Union[str, int, + # float, bool], Union[Any, Any, Any, Any]]]"; expected "ndarray" [arg-type] + content, columns = _finalize_columns_and_data( # type: ignore[assignment] + content, columns, dtype # type: ignore[arg-type] + ) return content, columns @@ -625,7 +642,11 @@ def _list_of_series_to_arrays( content = np.vstack(aligned_values) - return content, columns + # pandas/core/internals/construction.py:628: error: Incompatible return value type + # (got "Tuple[ndarray, Union[Index, List[Any]]]", expected + # "Tuple[List[Union[Union[str, int, float, bool], Union[Any, Any, Any, Any]]], + # Union[Index, List[Union[str, int]]]]") [return-value] + return content, columns # type: ignore[return-value] def _list_of_dict_to_arrays( @@ -672,17 +693,37 @@ def _finalize_columns_and_data( """ Ensure we have valid columns, cast object dtypes if possible. 
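The invariance note mypy attaches to the construction.py errors above ("List" is invariant, consider "Sequence") can be reproduced in isolation. The classes below are invented purely for illustration and are not pandas code:

from typing import List, Sequence

class Animal: ...
class Dog(Animal): ...

def feed_all(animals: List[Animal]) -> None:
    animals.append(Animal())  # writes like this are why List must be invariant

def count(animals: Sequence[Animal]) -> int:
    return len(animals)

dogs: List[Dog] = [Dog()]
count(dogs)       # accepted: Sequence is covariant (read-only view)
# feed_all(dogs)  # rejected by mypy: List[Dog] is not List[Animal]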
""" - content = list(content.T) + # pandas/core/internals/construction.py:675: error: Incompatible types in assignment + # (expression has type "List[Any]", variable has type "ndarray") [assignment] + content = list(content.T) # type: ignore[assignment] try: - columns = _validate_or_indexify_columns(content, columns) + # pandas/core/internals/construction.py:678: error: Argument 1 to + # "_validate_or_indexify_columns" has incompatible type "ndarray"; expected + # "List[Any]" [arg-type] + columns = _validate_or_indexify_columns( + content, columns # type: ignore[arg-type] + ) except AssertionError as err: # GH#26429 do not raise user-facing AssertionError raise ValueError(err) from err if len(content) and content[0].dtype == np.object_: - content = _convert_object_array(content, dtype=dtype) - return content, columns + # pandas/core/internals/construction.py:684: error: Incompatible types in + # assignment (expression has type "List[Union[Union[str, int, float, bool], + # Union[Any, Any, Any, Any]]]", variable has type "ndarray") [assignment] + + # pandas/core/internals/construction.py:684: error: Argument 1 to + # "_convert_object_array" has incompatible type "ndarray"; expected + # "List[Union[Union[str, int, float, bool], Union[Any, Any, Any, Any]]]" + # [arg-type] + content = _convert_object_array( # type: ignore[assignment] + content, dtype=dtype # type: ignore[arg-type] + ) + # pandas/core/internals/construction.py:685: error: Incompatible return value type + # (got "Tuple[ndarray, Union[Index, List[Union[str, int]]]]", expected + # "Tuple[List[ndarray], Union[Index, List[Union[str, int]]]]") [return-value] + return content, columns # type: ignore[return-value] def _validate_or_indexify_columns( diff --git a/pandas/core/window/ewm.py b/pandas/core/window/ewm.py index d9722497bd023..b74a74b7d443f 100644 --- a/pandas/core/window/ewm.py +++ b/pandas/core/window/ewm.py @@ -258,7 +258,12 @@ def __init__( ) if isna(times).any(): raise ValueError("Cannot convert NaT values to integer") - self.times = np.asarray(times.view(np.int64)) + # pandas/core/window/ewm.py:261: error: Item "str" of "Union[str, ndarray, + # FrameOrSeries, None]" has no attribute "view" [union-attr] + + # pandas/core/window/ewm.py:261: error: Item "None" of "Union[str, ndarray, + # FrameOrSeries, None]" has no attribute "view" [union-attr] + self.times = np.asarray(times.view(np.int64)) # type: ignore[union-attr] self.halflife = Timedelta(halflife).value # Halflife is no longer applicable when calculating COM # But allow COM to still be calculated if the user passes other decay args diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index d68918d15924f..aa91f81e27291 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -297,9 +297,13 @@ def _prep_values(self, values: Optional[np.ndarray] = None) -> np.ndarray: self._selected_obj, extract_numpy=True ) - if needs_i8_conversion(values.dtype): + # pandas/core/window/rolling.py:300: error: Item "None" of "Optional[ndarray]" + # has no attribute "dtype" [union-attr] + if needs_i8_conversion(values.dtype): # type: ignore[union-attr] raise NotImplementedError( - f"ops for {type(self).__name__} for this " + # pandas/core/window/rolling.py:302: error: Item "None" of + # "Optional[ndarray]" has no attribute "dtype" [union-attr] + f"ops for {type(self).__name__} for this " # type: ignore[union-attr] f"dtype {values.dtype} are not implemented" ) else: diff --git a/pandas/io/json/_json.py b/pandas/io/json/_json.py index 
0791599dad201..9999ee79f2e61 100644 --- a/pandas/io/json/_json.py +++ b/pandas/io/json/_json.py @@ -535,7 +535,13 @@ def read_json( raise ValueError("cannot pass both convert_axes and orient='table'") if dtype is None and orient != "table": - dtype = True + # pandas/io/json/_json.py:538: error: Incompatible types in assignment + # (expression has type "bool", variable has type "Union[ExtensionDtype, str, + # dtype[Any], Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object], Dict[Optional[Hashable], Union[ExtensionDtype, Union[str, + # dtype[Any]], Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]], None]") [assignment] + dtype = True # type: ignore[assignment] if convert_axes is None and orient != "table": convert_axes = True @@ -875,7 +881,13 @@ def _try_convert_data(self, name, data, use_dtypes=True, convert_dates=True): return data, False return data.fillna(np.nan), True - elif self.dtype is True: + # pandas/io/json/_json.py:878: error: Non-overlapping identity check (left + # operand type: "Union[ExtensionDtype, str, dtype[Any], Type[object], + # Dict[Optional[Hashable], Union[ExtensionDtype, Union[str, dtype[Any]], + # Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]]]", right operand type: "Literal[True]") + # [comparison-overlap] + elif self.dtype is True: # type: ignore[comparison-overlap] pass else: # dtype to force @@ -884,7 +896,10 @@ def _try_convert_data(self, name, data, use_dtypes=True, convert_dates=True): ) if dtype is not None: try: - dtype = np.dtype(dtype) + # pandas/io/json/_json.py:887: error: Argument 1 to "dtype" has + # incompatible type "Union[ExtensionDtype, str, dtype[Any], + # Type[object]]"; expected "Type[Any]" [arg-type] + dtype = np.dtype(dtype) # type: ignore[arg-type] return data.astype(dtype), True except (TypeError, ValueError): return data, False diff --git a/pandas/io/parsers.py b/pandas/io/parsers.py index 102d00f58607a..46956e3a0cdcd 100644 --- a/pandas/io/parsers.py +++ b/pandas/io/parsers.py @@ -3524,7 +3524,21 @@ def _get_empty_meta(columns, index_col, index_names, dtype: Optional[DtypeArg] = if not is_dict_like(dtype): # if dtype == None, default will be object. 
default_dtype = dtype or object - dtype = defaultdict(lambda: default_dtype) + # pandas/io/parsers.py:3527: error: Argument 1 to "defaultdict" has incompatible + # type "Callable[[], Union[ExtensionDtype, str, dtype[Any], Type[object], + # Dict[Optional[Hashable], Union[ExtensionDtype, Union[str, dtype[Any]], + # Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]]]]"; expected "Optional[Callable[[], Union[ExtensionDtype, str, + # dtype[Any], Type[object]]]]" [arg-type] + + # pandas/io/parsers.py:3527: error: Incompatible return value type (got + # "Union[ExtensionDtype, str, dtype[Any], Type[object], Dict[Optional[Hashable], + # Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object]]]]", expected + # "Union[ExtensionDtype, str, dtype[Any], Type[object]]") [return-value] + dtype = defaultdict( + lambda: default_dtype # type: ignore[arg-type,return-value] + ) else: dtype = cast(dict, dtype) dtype = defaultdict( diff --git a/pandas/io/pytables.py b/pandas/io/pytables.py index 838f6d58f9768..a86eb4121c000 100644 --- a/pandas/io/pytables.py +++ b/pandas/io/pytables.py @@ -4959,7 +4959,12 @@ def _maybe_convert_for_string_atom( ) # itemsize is the maximum length of a string (along any dimension) - data_converted = _convert_string_array(data, encoding, errors).reshape(data.shape) + + # pandas/io/pytables.py:4962: error: Argument 1 to "_convert_string_array" has + # incompatible type "Union[ndarray, ExtensionArray]"; expected "ndarray" [arg-type] + data_converted = _convert_string_array( + data, encoding, errors # type: ignore[arg-type] + ).reshape(data.shape) assert data_converted.shape == block.shape, (data_converted.shape, block.shape) itemsize = data_converted.itemsize diff --git a/pandas/io/sql.py b/pandas/io/sql.py index 75b7493234ed9..7bf13ecfd8b84 100644 --- a/pandas/io/sql.py +++ b/pandas/io/sql.py @@ -1499,7 +1499,13 @@ def to_sql( """ if dtype: if not is_dict_like(dtype): - dtype = {col_name: dtype for col_name in frame} + # pandas/io/sql.py:1502: error: Value expression in dictionary + # comprehension has incompatible type "Union[ExtensionDtype, str, + # dtype[Any], Type[object], Dict[Optional[Hashable], + # Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object]]]]"; expected type + # "Union[ExtensionDtype, str, dtype[Any], Type[object]]" [misc] + dtype = {col_name: dtype for col_name in frame} # type: ignore[misc] else: dtype = cast(dict, dtype) @@ -1967,7 +1973,13 @@ def to_sql( """ if dtype: if not is_dict_like(dtype): - dtype = {col_name: dtype for col_name in frame} + # pandas/io/sql.py:1970: error: Value expression in dictionary + # comprehension has incompatible type "Union[ExtensionDtype, str, + # dtype[Any], Type[object], Dict[Optional[Hashable], + # Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object]]]]"; expected type + # "Union[ExtensionDtype, str, dtype[Any], Type[object]]" [misc] + dtype = {col_name: dtype for col_name in frame} # type: ignore[misc] else: dtype = cast(dict, dtype) From 86a5dcd1605d5cca3126a070b9e038d4a9c9825a Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 7 Jan 2021 12:22:51 +0000 Subject: [PATCH 43/86] update comments --- pandas/core/algorithms.py | 7 +- pandas/core/arrays/base.py | 8 ++- pandas/core/arrays/numpy_.py | 19 ++++- pandas/core/arrays/string_.py | 8 ++- pandas/core/base.py | 20 ++++-- 
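The dtype fallbacks above (parsers.py and sql.py) both expand a single requested dtype so that every column resolves to it unless overridden. A standalone sketch of that pattern, using hypothetical column names rather than anything from the patch:

from collections import defaultdict
import numpy as np

requested = np.dtype("float64")  # a single dtype rather than a per-column dict

# Any column name now resolves to the requested dtype, mirroring the
# defaultdict(lambda: default_dtype) fallback in the parser code.
per_column = defaultdict(lambda: requested)
per_column["id"] = np.dtype("int64")  # explicit per-column overrides still work

assert per_column["any_other_column"] == np.dtype("float64")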
pandas/core/construction.py | 7 +- pandas/core/dtypes/cast.py | 126 ++++++++++++++++++++++++++-------- pandas/core/dtypes/concat.py | 7 +- pandas/core/dtypes/missing.py | 7 +- pandas/core/indexes/base.py | 52 +++++++++++--- pandas/core/missing.py | 6 +- pandas/core/tools/numeric.py | 4 +- 12 files changed, 221 insertions(+), 50 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index e930bd4f64873..6ab7ab10a99bf 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -2205,7 +2205,12 @@ def safe_sort( if not isinstance(values, (np.ndarray, ABCExtensionArray)): # don't convert to string types dtype, _ = infer_dtype_from_array(values) - values = np.asarray(values, dtype=dtype) + # pandas/core/algorithms.py:2208: error: Argument "dtype" to "asarray" has + # incompatible type "Union[dtype[Any], ExtensionDtype]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" [arg-type] + values = np.asarray(values, dtype=dtype) # type: ignore[arg-type] sorter = None diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py index 1730fe9ee4ef2..c82dd43202090 100644 --- a/pandas/core/arrays/base.py +++ b/pandas/core/arrays/base.py @@ -428,7 +428,13 @@ def to_numpy( ------- numpy.ndarray """ - result = np.asarray(self, dtype=dtype) + # pandas/core/arrays/base.py:431: error: Argument "dtype" to "asarray" has + # incompatible type "Union[ExtensionDtype, str, dtype[Any], Type[str], + # Type[float], Type[int], Type[complex], Type[bool], Type[object], None]"; + # expected "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, + # int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" [arg-type] + result = np.asarray(self, dtype=dtype) # type: ignore[arg-type] if copy or na_value is not lib.no_default: result = result.copy() if na_value is not lib.no_default: diff --git a/pandas/core/arrays/numpy_.py b/pandas/core/arrays/numpy_.py index 89b327269f311..bc75c70bcd01f 100644 --- a/pandas/core/arrays/numpy_.py +++ b/pandas/core/arrays/numpy_.py @@ -178,7 +178,12 @@ def _from_sequence( if isinstance(dtype, PandasDtype): dtype = dtype._dtype - result = np.asarray(scalars, dtype=dtype) + # pandas/core/arrays/numpy_.py:181: error: Argument "dtype" to "asarray" has + # incompatible type "Union[ExtensionDtype, str, dtype[Any], + # dtype[floating[_64Bit]], Type[object], None]"; expected "Union[dtype[Any], + # None, type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + result = np.asarray(scalars, dtype=dtype) # type: ignore[arg-type] if copy and result is scalars: result = result.copy() return cls(result) @@ -416,7 +421,17 @@ def skew( # ------------------------------------------------------------------------ # Additional Methods - def to_numpy( + # pandas/core/arrays/numpy_.py:419: error: Argument 1 of "to_numpy" is incompatible + # with supertype "ExtensionArray"; supertype defines the argument type as + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]" [override] + + # pandas/core/arrays/numpy_.py:419: note: This violates the Liskov substitution + # principle + + # pandas/core/arrays/numpy_.py:419: note: See + # https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides + def to_numpy( # type: 
ignore[override] self, dtype: Optional[NpDtype] = None, copy: bool = False, diff --git a/pandas/core/arrays/string_.py b/pandas/core/arrays/string_.py index 5396b9acc2d04..984eab4bcee23 100644 --- a/pandas/core/arrays/string_.py +++ b/pandas/core/arrays/string_.py @@ -414,7 +414,13 @@ def _str_map(self, f, na_value=None, dtype: Optional[Dtype] = None): mask.view("uint8"), convert=False, na_value=na_value, - dtype=np.dtype(dtype), + # pandas/core/arrays/string_.py:417: error: Value of type variable + # "_DTypeScalar" of "dtype" cannot be "object" [type-var] + + # pandas/core/arrays/string_.py:417: error: Argument 1 to "dtype" has + # incompatible type "Union[ExtensionDtype, str, dtype[Any], + # Type[object]]"; expected "Type[object]" [arg-type] + dtype=np.dtype(dtype), # type: ignore[type-var,arg-type] ) if not na_value_is_na: diff --git a/pandas/core/base.py b/pandas/core/base.py index 2162fc8fe115d..d4c9681946982 100644 --- a/pandas/core/base.py +++ b/pandas/core/base.py @@ -720,12 +720,18 @@ def argmax(self, axis=None, skipna: bool = True, *args, **kwargs) -> int: skipna = nv.validate_argmax_with_skipna(skipna, args, kwargs) if isinstance(delegate, ExtensionArray): - if not skipna and delegate.isna().any(): + # pandas/core/base.py:723: error: "ExtensionArray" has no attribute "any" + # [attr-defined] + if not skipna and delegate.isna().any(): # type: ignore[attr-defined] return -1 else: return delegate.argmax() else: - return nanops.nanargmax(delegate, skipna=skipna) + # pandas/core/base.py:728: error: Incompatible return value type (got + # "Union[int, ndarray]", expected "int") [return-value] + return nanops.nanargmax( # type: ignore[return-value] + delegate, skipna=skipna + ) def min(self, axis=None, skipna: bool = True, *args, **kwargs): """ @@ -778,12 +784,18 @@ def argmin(self, axis=None, skipna=True, *args, **kwargs) -> int: skipna = nv.validate_argmin_with_skipna(skipna, args, kwargs) if isinstance(delegate, ExtensionArray): - if not skipna and delegate.isna().any(): + # pandas/core/base.py:781: error: "ExtensionArray" has no attribute "any" + # [attr-defined] + if not skipna and delegate.isna().any(): # type: ignore[attr-defined] return -1 else: return delegate.argmin() else: - return nanops.nanargmin(delegate, skipna=skipna) + # pandas/core/base.py:786: error: Incompatible return value type (got + # "Union[int, ndarray]", expected "int") [return-value] + return nanops.nanargmin( # type: ignore[return-value] + delegate, skipna=skipna + ) def tolist(self): """ diff --git a/pandas/core/construction.py b/pandas/core/construction.py index aad8b158eec26..09c9a4f750f2f 100644 --- a/pandas/core/construction.py +++ b/pandas/core/construction.py @@ -569,7 +569,12 @@ def _sanitize_str_dtypes( # GH#19853: If data is a scalar, result has already the result if not lib.is_scalar(data): if not np.all(isna(data)): - data = np.array(data, dtype=dtype, copy=False) + # pandas/core/construction.py:572: error: Argument "dtype" to "array" + # has incompatible type "Union[dtype[Any], ExtensionDtype, None]"; + # expected "Union[dtype[Any], None, type, _SupportsDType, str, + # Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + data = np.array(data, dtype=dtype, copy=False) # type: ignore[arg-type] result = np.array(data, dtype=object, copy=copy) return result diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index 2c7ad68fc5367..c82717bec592b 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py 
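Most hunks in this series apply one idiom: the full mypy error message, as emitted against the numpy nightly/RC stubs, is kept verbatim as a comment immediately above the offending line, and the error is silenced with an error-code-qualified ignore rather than a bare "# type: ignore". A minimal sketch of that idiom on a deliberately trivial, hypothetical example (the variable below is not taken from the pandas code touched in these hunks):

    x: int = 0
    # error: Incompatible types in assignment (expression has type "str",
    # variable has type "int")  [assignment]
    x = "not an int"  # type: ignore[assignment]

Qualifying the ignore with the bracketed error code (the code mypy reports when run with --show-error-codes) narrows the suppression to that single class of error and makes each suppression easy to grep for and revisit once the numpy annotations stabilise.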
@@ -968,12 +968,19 @@ def astype_dt64_to_dt64tz( from pandas.core.construction import ensure_wrapped_if_datetimelike values = ensure_wrapped_if_datetimelike(values) - values = cast("DatetimeArray", values) + # pandas/core/dtypes/cast.py:971: error: Incompatible types in assignment + # (expression has type "DatetimeArray", variable has type "ndarray") [assignment] + values = cast("DatetimeArray", values) # type: ignore[assignment] aware = isinstance(dtype, DatetimeTZDtype) if via_utc: # Series.astype behavior - assert values.tz is None and aware # caller is responsible for checking this + + # caller is responsible for checking this + + # pandas/core/dtypes/cast.py:976: error: "ndarray" has no attribute "tz" + # [attr-defined] + assert values.tz is None and aware # type: ignore[attr-defined] dtype = cast(DatetimeTZDtype, dtype) if copy: @@ -981,25 +988,42 @@ def astype_dt64_to_dt64tz( values = values.copy() # FIXME: GH#33401 this doesn't match DatetimeArray.astype, which # goes through the `not via_utc` path - return values.tz_localize("UTC").tz_convert(dtype.tz) + + # pandas/core/dtypes/cast.py:984: error: "ndarray" has no attribute + # "tz_localize" [attr-defined] + return values.tz_localize("UTC").tz_convert( # type: ignore[attr-defined] + dtype.tz + ) else: # DatetimeArray/DatetimeIndex.astype behavior - if values.tz is None and aware: + # pandas/core/dtypes/cast.py:989: error: "ndarray" has no attribute "tz" + # [attr-defined] + if values.tz is None and aware: # type: ignore[attr-defined] dtype = cast(DatetimeTZDtype, dtype) - return values.tz_localize(dtype.tz) + # pandas/core/dtypes/cast.py:991: error: "ndarray" has no attribute + # "tz_localize" [attr-defined] + return values.tz_localize(dtype.tz) # type: ignore[attr-defined] elif aware: # GH#18951: datetime64_tz dtype but not equal means different tz dtype = cast(DatetimeTZDtype, dtype) - result = values.tz_convert(dtype.tz) + # pandas/core/dtypes/cast.py:996: error: "ndarray" has no attribute + # "tz_convert" [attr-defined] + result = values.tz_convert(dtype.tz) # type: ignore[attr-defined] if copy: result = result.copy() return result - elif values.tz is not None: - result = values.tz_convert("UTC").tz_localize(None) + # pandas/core/dtypes/cast.py:1001: error: "ndarray" has no attribute "tz" + # [attr-defined] + elif values.tz is not None: # type: ignore[attr-defined] + # pandas/core/dtypes/cast.py:1002: error: "ndarray" has no attribute + # "tz_convert" [attr-defined] + result = values.tz_convert("UTC").tz_localize( # type: ignore[attr-defined] + None + ) if copy: result = result.copy() return result @@ -1067,7 +1091,9 @@ def astype_nansafe( flat = arr.ravel("K") result = astype_nansafe(flat, dtype, copy=copy, skipna=skipna) order = "F" if flags.f_contiguous else "C" - return result.reshape(arr.shape, order=order) + # pandas/core/dtypes/cast.py:1070: error: "ExtensionArray" has no attribute + # "reshape"; maybe "shape"? 
[attr-defined] + return result.reshape(arr.shape, order=order) # type: ignore[attr-defined] # We get here with 0-dim from sparse arr = np.atleast_1d(arr) @@ -1085,7 +1111,9 @@ def astype_nansafe( from pandas.core.construction import ensure_wrapped_if_datetimelike arr = ensure_wrapped_if_datetimelike(arr) - return arr.astype(dtype, copy=copy) + # pandas/core/dtypes/cast.py:1088: error: Incompatible return value type (got + # "ndarray", expected "ExtensionArray") [return-value] + return arr.astype(dtype, copy=copy) # type: ignore[return-value] if issubclass(dtype.type, str): return lib.ensure_string_array(arr, skipna=skipna, convert_na_value=False) @@ -1102,11 +1130,15 @@ def astype_nansafe( ) if isna(arr).any(): raise ValueError("Cannot convert NaT values to integer") - return arr.view(dtype) + # pandas/core/dtypes/cast.py:1105: error: Incompatible return value type + # (got "ndarray", expected "ExtensionArray") [return-value] + return arr.view(dtype) # type: ignore[return-value] # allow frequency conversions if dtype.kind == "M": - return arr.astype(dtype) + # pandas/core/dtypes/cast.py:1109: error: Incompatible return value type + # (got "ndarray", expected "ExtensionArray") [return-value] + return arr.astype(dtype) # type: ignore[return-value] raise TypeError(f"cannot astype a datetimelike from [{arr.dtype}] to [{dtype}]") @@ -1122,20 +1154,20 @@ def astype_nansafe( ) if isna(arr).any(): raise ValueError("Cannot convert NaT values to integer") - return arr.view(dtype) + # pandas/core/dtypes/cast.py:1125: error: Incompatible return value type + # (got "ndarray", expected "ExtensionArray") [return-value] + return arr.view(dtype) # type: ignore[return-value] elif dtype.kind == "m": - return astype_td64_unit_conversion(arr, dtype, copy=copy) + # pandas/core/dtypes/cast.py:1128: error: Incompatible return value type + # (got "ndarray", expected "ExtensionArray") [return-value] + return astype_td64_unit_conversion( # type: ignore[return-value] + arr, dtype, copy=copy + ) raise TypeError(f"cannot astype a timedelta from [{arr.dtype}] to [{dtype}]") - # pandas\core\dtypes\cast.py:1065: error: Argument 1 to "issubdtype" has - # incompatible type "Union[dtype, ExtensionDtype]"; expected "Union[dtype, - # None, type, _SupportsDtype, str, Tuple[Any, int], Tuple[Any, Union[int, - # Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, Any]]" [arg-type] - elif np.issubdtype(arr.dtype, np.floating) and np.issubdtype( - dtype, np.integer # type: ignore[arg-type] - ): + elif np.issubdtype(arr.dtype, np.floating) and np.issubdtype(dtype, np.integer): if not np.isfinite(arr).all(): raise ValueError("Cannot convert non-finite values (NA or inf) to integer") @@ -1152,10 +1184,32 @@ def astype_nansafe( elif is_datetime64_dtype(dtype): from pandas import to_datetime - # pandas\core\dtypes\cast.py:1082: error: Incompatible return value - # type (got "ExtensionArray", expected "ndarray") [return-value] + # pandas/core/dtypes/cast.py:1179: error: Incompatible return value type + # (got "ExtensionArray", expected "ndarray") [return-value] return astype_nansafe( # type: ignore[return-value] - to_datetime(arr).values, dtype, copy=copy + # pandas/core/dtypes/cast.py:1158: error: No overload variant of + # "to_datetime" matches argument type "ndarray" [call-overload] + # pandas/core/dtypes/cast.py:1158: note: Possible overload variants: + # pandas/core/dtypes/cast.py:1158: note: def [DatetimeScalar in + # (Union[int, float, str], datetime)] to_datetime(arg: DatetimeScalar, + # errors: str = ..., dayfirst: bool = ..., 
yearfirst: bool = ..., utc: + # Optional[bool] = ..., format: Optional[str] = ..., exact: bool = ..., + # unit: Optional[str] = ..., infer_datetime_format: bool = ..., origin: + # Any = ..., cache: bool = ...) -> Union[DatetimeScalar, Any] + # pandas/core/dtypes/cast.py:1158: note: def to_datetime(arg: + # Series, errors: str = ..., dayfirst: bool = ..., yearfirst: bool = + # ..., utc: Optional[bool] = ..., format: Optional[str] = ..., exact: + # bool = ..., unit: Optional[str] = ..., infer_datetime_format: bool = + # ..., origin: Any = ..., cache: bool = ...) -> Series + # pandas/core/dtypes/cast.py:1158: note: def to_datetime(arg: + # Union[List[Any], Tuple[Any, ...]], errors: str = ..., dayfirst: bool = + # ..., yearfirst: bool = ..., utc: Optional[bool] = ..., format: + # Optional[str] = ..., exact: bool = ..., unit: Optional[str] = ..., + # infer_datetime_format: bool = ..., origin: Any = ..., cache: bool = + # ...) -> DatetimeIndex + to_datetime(arr).values, # type: ignore[call-overload] + dtype, + copy=copy, ) elif is_timedelta64_dtype(dtype): from pandas import to_timedelta @@ -1175,9 +1229,14 @@ def astype_nansafe( if copy or is_object_dtype(arr.dtype) or is_object_dtype(dtype): # Explicit copy, or required since NumPy can't view from / to object. - return arr.astype(dtype, copy=True) - return arr.astype(dtype, copy=copy) + # pandas/core/dtypes/cast.py:1178: error: Incompatible return value type (got + # "ndarray", expected "ExtensionArray") [return-value] + return arr.astype(dtype, copy=True) # type: ignore[return-value] + + # pandas/core/dtypes/cast.py:1180: error: Incompatible return value type (got + # "ndarray", expected "ExtensionArray") [return-value] + return arr.astype(dtype, copy=copy) # type: ignore[return-value] def soft_convert_objects( @@ -1521,7 +1580,13 @@ def maybe_cast_to_datetime(value, dtype: Optional[DtypeObj]): # we have an array of datetime or timedeltas & nulls elif np.prod(value.shape) or not is_dtype_equal(value.dtype, dtype): - _disallow_mismatched_datetimelike(value, dtype) + # pandas/core/dtypes/cast.py:1524: error: Argument 2 to + # "_disallow_mismatched_datetimelike" has incompatible type + # "Union[dtype[Any], ExtensionDtype, None]"; expected + # "Union[dtype[Any], ExtensionDtype]" [arg-type] + _disallow_mismatched_datetimelike( + value, dtype # type: ignore[arg-type] + ) try: if is_datetime64: @@ -1817,7 +1882,12 @@ def construct_1d_ndarray_preserving_na( else: if dtype is not None: _disallow_mismatched_datetimelike(values, dtype) - subarr = np.array(values, dtype=dtype, copy=copy) + # pandas/core/dtypes/cast.py:1820: error: Argument "dtype" to "array" has + # incompatible type "Union[dtype[Any], ExtensionDtype, None]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" [arg-type] + subarr = np.array(values, dtype=dtype, copy=copy) # type: ignore[arg-type] return subarr diff --git a/pandas/core/dtypes/concat.py b/pandas/core/dtypes/concat.py index ce748f261f71e..c11bdfa73a430 100644 --- a/pandas/core/dtypes/concat.py +++ b/pandas/core/dtypes/concat.py @@ -98,7 +98,12 @@ def _cast_to_common_type(arr: ArrayLike, dtype: DtypeObj) -> ArrayLike: if is_extension_array_dtype(dtype) and isinstance(arr, np.ndarray): # numpy's astype cannot handle ExtensionDtypes return array(arr, dtype=dtype, copy=False) - return arr.astype(dtype, copy=False) + # pandas/core/dtypes/concat.py:101: error: Argument 1 to "astype" of + # 
"_ArrayOrScalarCommon" has incompatible type "Union[dtype[Any], ExtensionDtype]"; + # expected "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, + # int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" [arg-type] + return arr.astype(dtype, copy=False) # type: ignore[arg-type] def concat_compat(to_concat, axis: int = 0): diff --git a/pandas/core/dtypes/missing.py b/pandas/core/dtypes/missing.py index cf4895f0dc28d..4540dea9ff1f6 100644 --- a/pandas/core/dtypes/missing.py +++ b/pandas/core/dtypes/missing.py @@ -645,5 +645,10 @@ def isna_all(arr: ArrayLike) -> bool: ) return all( - checker(arr[i : i + chunk_len]).all() for i in range(0, total_len, chunk_len) + # pandas/core/dtypes/missing.py:648: error: Argument 1 to "__call__" of "ufunc" + # has incompatible type "Union[ExtensionArray, Any]"; expected "Union[Union[int, + # float, complex, str, bytes, generic], Sequence[Union[int, float, complex, str, + # bytes, generic]], Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + checker(arr[i : i + chunk_len]).all() # type: ignore[arg-type] + for i in range(0, total_len, chunk_len) ) diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index c359c896d1cf2..9a7afa2d675df 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -338,7 +338,13 @@ def __new__( # they are actually ints, e.g. '0' and 0.0 # should not be coerced # GH 11836 - data = _maybe_cast_with_dtype(data, dtype, copy) + + # pandas/core/indexes/base.py:341: error: Argument 1 to + # "_maybe_cast_with_dtype" has incompatible type "Union[ndarray, Index, + # Series]"; expected "ndarray" [arg-type] + data = _maybe_cast_with_dtype( + data, dtype, copy # type: ignore[arg-type] + ) dtype = data.dtype if data.dtype.kind in ["i", "u", "f"]: @@ -2806,10 +2812,17 @@ def union(self, other, sort=None): # | -> T # | -> object if not (is_integer_dtype(self.dtype) and is_integer_dtype(other.dtype)): - dtype = "float64" + # pandas/core/indexes/base.py:2809: error: Incompatible types in + # assignment (expression has type "str", variable has type + # "Union[dtype[Any], ExtensionDtype]") [assignment] + dtype = "float64" # type: ignore[assignment] else: # one is int64 other is uint64 - dtype = object + + # pandas/core/indexes/base.py:2812: error: Incompatible types in + # assignment (expression has type "Type[object]", variable has type + # "Union[dtype[Any], ExtensionDtype]") [assignment] + dtype = object # type: ignore[assignment] left = self.astype(dtype, copy=False) right = other.astype(dtype, copy=False) @@ -4241,7 +4254,12 @@ def values(self) -> ArrayLike: Index.array : Reference to the underlying data. Index.to_numpy : A NumPy array representing the underlying data. 
""" - return self._data + # pandas/core/indexes/base.py:4244: error: Incompatible return value type (got + # "Union[ExtensionArray, ndarray]", expected "ExtensionArray") [return-value] + + # pandas/core/indexes/base.py:4244: error: Incompatible return value type (got + # "Union[ExtensionArray, ndarray]", expected "ndarray") [return-value] + return self._data # type: ignore[return-value] @cache_readonly @doc(IndexOpsMixin.array) @@ -5898,7 +5916,11 @@ def any(self, *args, **kwargs): """ # FIXME: docstr inaccurate, args/kwargs not passed self._maybe_disable_logical_methods("any") - return np.any(self.values) + # pandas/core/indexes/base.py:5901: error: Argument 1 to "any" has incompatible + # type "ArrayLike"; expected "Union[Union[int, float, complex, str, bytes, + # generic], Sequence[Union[int, float, complex, str, bytes, generic]], + # Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + return np.any(self.values) # type: ignore[arg-type] def all(self): """ @@ -5956,7 +5978,11 @@ def all(self): # FIXME: docstr inaccurate, args/kwargs not passed self._maybe_disable_logical_methods("all") - return np.all(self.values) + # pandas/core/indexes/base.py:5959: error: Argument 1 to "all" has incompatible + # type "ArrayLike"; expected "Union[Union[int, float, complex, str, bytes, + # generic], Sequence[Union[int, float, complex, str, bytes, generic]], + # Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + return np.all(self.values) # type: ignore[arg-type] @final def _maybe_disable_logical_methods(self, opname: str_t): @@ -6243,7 +6269,12 @@ def _maybe_cast_data_without_dtype(subarr): if inferred == "integer": try: - data = _try_convert_to_int_array(subarr, False, None) + # pandas/core/indexes/base.py:6246: error: Argument 3 to + # "_try_convert_to_int_array" has incompatible type "None"; expected + # "dtype[Any]" [arg-type] + data = _try_convert_to_int_array( + subarr, False, None # type: ignore[arg-type] + ) return data except ValueError: pass @@ -6277,7 +6308,12 @@ def _maybe_cast_data_without_dtype(subarr): pass elif inferred.startswith("timedelta"): - data = TimedeltaArray._from_sequence(subarr, copy=False) + # pandas/core/indexes/base.py:6280: error: Incompatible types in assignment + # (expression has type "TimedeltaArray", variable has type "ndarray") + # [assignment] + data = TimedeltaArray._from_sequence( # type: ignore[assignment] + subarr, copy=False + ) return data elif inferred == "period": try: diff --git a/pandas/core/missing.py b/pandas/core/missing.py index 78e9039b5cbbf..742a66452032a 100644 --- a/pandas/core/missing.py +++ b/pandas/core/missing.py @@ -41,7 +41,11 @@ def mask_missing(arr: ArrayLike, values_to_mask) -> np.ndarray: # known to be holdable by arr. 
# When called from Series._single_replace, values_to_mask is tuple or list dtype, values_to_mask = infer_dtype_from(values_to_mask) - values_to_mask = np.array(values_to_mask, dtype=dtype) + # pandas/core/missing.py:44: error: Argument "dtype" to "array" has incompatible + # type "Union[dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], None, type, + # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + values_to_mask = np.array(values_to_mask, dtype=dtype) # type: ignore[arg-type] na_mask = isna(values_to_mask) nonna = values_to_mask[~na_mask] diff --git a/pandas/core/tools/numeric.py b/pandas/core/tools/numeric.py index d8a5855d05dfd..011618a03814b 100644 --- a/pandas/core/tools/numeric.py +++ b/pandas/core/tools/numeric.py @@ -165,7 +165,9 @@ def to_numeric(arg, errors="raise", downcast=None): mask = values._mask values = values._data[~mask] else: - mask = None + # pandas/core/tools/numeric.py:168: error: Incompatible types in assignment + # (expression has type "None", variable has type "ndarray") [assignment] + mask = None # type: ignore[assignment] values_dtype = getattr(values, "dtype", None) if is_numeric_dtype(values_dtype): From 133095dcaeed72358652ec526ceb7f921d5a7c17 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 7 Jan 2021 12:37:39 +0000 Subject: [PATCH 44/86] remove added workflow --- .github/workflows/ci.yml | 43 ---------------------------------------- environment.yml | 1 + 2 files changed, 1 insertion(+), 43 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 50e8a89df2ce9..e8834bd509bf0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -132,46 +132,3 @@ jobs: - name: Upload dev docs run: rsync -az --delete doc/build/html/ docs@${{ secrets.server_ip }}:/usr/share/nginx/pandas/pandas-docs/dev if: github.event_name == 'push' - - numpy-types: - name: Numpy Type Annotations - runs-on: ubuntu-latest - continue-on-error: true - steps: - - name: Update for build environment - run: | - sudo apt-get update - sudo apt-get install -y build-essential - sudo apt-get clean - - name: Setting conda path - run: echo "$CONDA/bin" >> $GITHUB_PATH - - name: Update conda - run: | - conda config --set quiet true --set always_yes true - conda update -n base -c defaults conda - conda list - - name: Checkout pandas - uses: actions/checkout@v2 - - name: Update conda environment - run: | - conda env update -n pandas-dev --file=environment.yml - conda list - # - name: Update numpy using wheel from nightly - # run: | - # source activate pandas-dev - # pip install -U --extra-index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple --pre numpy - # conda list - - name: Update numpy to release candidate - run: | - source activate pandas-dev - pip install numpy==1.20.0rc2 - conda list - - name: Remove pandas - run: | - source activate pandas-dev - conda uninstall -y --force pandas || true - conda list - - name: Typing validation - run: | - source activate pandas-dev - ci/code_checks.sh typing diff --git a/environment.yml b/environment.yml index 600a20b153ed3..e08a45cce32b9 100644 --- a/environment.yml +++ b/environment.yml @@ -113,5 +113,6 @@ dependencies: - tabulate>=0.8.3 # DataFrame.to_markdown - natsort # DataFrame.sort_values - pip: + - numpy==1.20.0rc2 - git+https://github.com/pandas-dev/pydata-sphinx-theme.git@master - git+https://github.com/numpy/numpydoc From fdba33891ea3a4ef581f660cb4bd80120008eff9 Mon Sep 17 00:00:00 2001 From: 
Simon Hawkins Date: Thu, 7 Jan 2021 13:58:33 +0000 Subject: [PATCH 45/86] lint --- pandas/core/arrays/string_.py | 1 - requirements-dev.txt | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/pandas/core/arrays/string_.py b/pandas/core/arrays/string_.py index 984eab4bcee23..85a282bd93550 100644 --- a/pandas/core/arrays/string_.py +++ b/pandas/core/arrays/string_.py @@ -416,7 +416,6 @@ def _str_map(self, f, na_value=None, dtype: Optional[Dtype] = None): na_value=na_value, # pandas/core/arrays/string_.py:417: error: Value of type variable # "_DTypeScalar" of "dtype" cannot be "object" [type-var] - # pandas/core/arrays/string_.py:417: error: Argument 1 to "dtype" has # incompatible type "Union[ExtensionDtype, str, dtype[Any], # Type[object]]"; expected "Type[object]" [arg-type] diff --git a/requirements-dev.txt b/requirements-dev.txt index d45e87b1785b0..6a01384dcc8df 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -76,5 +76,6 @@ cftime pyreadstat tabulate>=0.8.3 natsort +numpy==1.20.0rc2 git+https://github.com/pandas-dev/pydata-sphinx-theme.git@master git+https://github.com/numpy/numpydoc From 0bf1790728e281af690bc49b59beef205cf0f4b9 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Fri, 8 Jan 2021 12:36:31 +0000 Subject: [PATCH 46/86] errors from NpDtype --- pandas/core/common.py | 13 ++++++++++--- pandas/core/construction.py | 12 +++++++++++- pandas/core/indexes/base.py | 13 +++++++++++-- pandas/core/indexes/multi.py | 7 ++++++- pandas/core/indexes/numeric.py | 6 +++++- 5 files changed, 43 insertions(+), 8 deletions(-) diff --git a/pandas/core/common.py b/pandas/core/common.py index a6514b5167460..c47c8bbfb2e03 100644 --- a/pandas/core/common.py +++ b/pandas/core/common.py @@ -200,9 +200,16 @@ def asarray_tuplesafe(values, dtype: Optional[NpDtype] = None) -> np.ndarray: if not (isinstance(values, (list, tuple)) or hasattr(values, "__array__")): values = list(values) elif isinstance(values, ABCIndex): - return values._values - - if isinstance(values, list) and dtype in [np.object_, object]: + # pandas/core/common.py:203: error: Incompatible return value type (got + # "Union[ExtensionArray, ndarray]", expected "ndarray") [return-value] + return values._values # type: ignore[return-value] + + # pandas/core/common.py:205: error: Non-overlapping container check (element type: + # "Union[str, dtype[Any], None]", container item type: "type") [comparison-overlap] + if isinstance(values, list) and dtype in [ # type: ignore[comparison-overlap] + np.object_, + object, + ]: return construct_1d_object_array_from_listlike(values) result = np.asarray(values, dtype=dtype) diff --git a/pandas/core/construction.py b/pandas/core/construction.py index 09c9a4f750f2f..d98e107d2b26b 100644 --- a/pandas/core/construction.py +++ b/pandas/core/construction.py @@ -550,7 +550,17 @@ def _sanitize_ndim( if isinstance(data, np.ndarray): raise ValueError("Data must be 1-dimensional") else: - result = com.asarray_tuplesafe(data, dtype=dtype) + # pandas/core/construction.py:553: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "ExtensionArray") + # [assignment] + + # pandas/core/construction.py:553: error: Argument "dtype" to + # "asarray_tuplesafe" has incompatible type "Union[dtype[Any], + # ExtensionDtype, None]"; expected "Union[str, dtype[Any], None]" + # [arg-type] + result = com.asarray_tuplesafe( # type: ignore[assignment] + data, dtype=dtype # type: ignore[arg-type] + ) return result diff --git a/pandas/core/indexes/base.py 
b/pandas/core/indexes/base.py index 9a7afa2d675df..6a7665937ef7f 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -351,7 +351,12 @@ def __new__( # maybe coerce to a sub-class arr = data else: - arr = com.asarray_tuplesafe(data, dtype=object) + # pandas/core/indexes/base.py:354: error: Argument "dtype" to + # "asarray_tuplesafe" has incompatible type "Type[object]"; expected + # "Union[str, dtype[Any], None]" [arg-type] + arr = com.asarray_tuplesafe( + data, dtype=object # type: ignore[arg-type] + ) if dtype is None: arr = _maybe_cast_data_without_dtype(arr) @@ -385,7 +390,11 @@ def __new__( data, names=name or kwargs.get("names") ) # other iterable of some kind - subarr = com.asarray_tuplesafe(data, dtype=object) + + # pandas/core/indexes/base.py:388: error: Argument "dtype" to + # "asarray_tuplesafe" has incompatible type "Type[object]"; expected + # "Union[str, dtype[Any], None]" [arg-type] + subarr = com.asarray_tuplesafe(data, dtype=object) # type: ignore[arg-type] return Index(subarr, dtype=dtype, copy=copy, name=name, **kwargs) @classmethod diff --git a/pandas/core/indexes/multi.py b/pandas/core/indexes/multi.py index 2e06b7591ef5f..87d52c0b5c4cd 100644 --- a/pandas/core/indexes/multi.py +++ b/pandas/core/indexes/multi.py @@ -2182,7 +2182,12 @@ def drop(self, codes, level=None, errors="raise"): if not isinstance(codes, (np.ndarray, Index)): try: - codes = com.index_labels_to_array(codes, dtype=object) + # pandas/core/indexes/multi.py:2185: error: Argument "dtype" to + # "index_labels_to_array" has incompatible type "Type[object]"; expected + # "Union[str, dtype[Any], None]" [arg-type] + codes = com.index_labels_to_array( + codes, dtype=object # type: ignore[arg-type] + ) except ValueError: pass diff --git a/pandas/core/indexes/numeric.py b/pandas/core/indexes/numeric.py index 63a38255d15ae..3d39312b96496 100644 --- a/pandas/core/indexes/numeric.py +++ b/pandas/core/indexes/numeric.py @@ -318,7 +318,11 @@ def _convert_arr_indexer(self, keyarr): ): dtype = np.uint64 - return com.asarray_tuplesafe(keyarr, dtype=dtype) + # pandas/core/indexes/numeric.py:321: error: Argument "dtype" to + # "asarray_tuplesafe" has incompatible type + # "Optional[Type[unsignedinteger[Any]]]"; expected "Union[str, dtype[Any], + # None]" [arg-type] + return com.asarray_tuplesafe(keyarr, dtype=dtype) # type: ignore[arg-type] _float64_descr_args = { From f95dda6e9df4600942dd21b8713412b13665f0cd Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 9 Jan 2021 16:52:02 +0000 Subject: [PATCH 47/86] update ignores --- pandas/core/base.py | 8 +++++++- pandas/core/generic.py | 6 +++++- pandas/core/internals/managers.py | 15 +++++++++++++-- pandas/core/reshape/reshape.py | 5 ++++- pandas/core/series.py | 12 +++++++++++- setup.cfg | 3 +++ 6 files changed, 43 insertions(+), 6 deletions(-) diff --git a/pandas/core/base.py b/pandas/core/base.py index 812917be4d6f7..260ffa02a5bc0 100644 --- a/pandas/core/base.py +++ b/pandas/core/base.py @@ -613,7 +613,13 @@ def to_numpy( f"to_numpy() got an unexpected keyword argument '{bad_keys}'" ) - result = np.asarray(self._values, dtype=dtype) + # pandas/core/base.py:616: error: Argument "dtype" to "asarray" has incompatible + # type "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object], None]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" [arg-type] + result 
= np.asarray(self._values, dtype=dtype) # type: ignore[arg-type] # TODO(GH-24345): Avoid potential double copy if copy or na_value is not lib.no_default: result = result.copy() diff --git a/pandas/core/generic.py b/pandas/core/generic.py index baa2565a68e08..5518fb980468f 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -9848,7 +9848,11 @@ def abs(self: FrameOrSeries) -> FrameOrSeries: 2 6 30 -30 3 7 40 -50 """ - return np.abs(self) + # pandas/core/generic.py:9851: error: Argument 1 to "__call__" of "ufunc" has + # incompatible type "FrameOrSeries"; expected "Union[Union[int, float, complex, + # str, bytes, generic], Sequence[Union[int, float, complex, str, bytes, + # generic]], Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + return np.abs(self) # type: ignore[arg-type] @final def describe( diff --git a/pandas/core/internals/managers.py b/pandas/core/internals/managers.py index d1bc6fb0f7360..5efd62300ca3c 100644 --- a/pandas/core/internals/managers.py +++ b/pandas/core/internals/managers.py @@ -866,7 +866,13 @@ def as_array( else: arr = np.asarray(blk.get_values()) if dtype: - arr = arr.astype(dtype, copy=False) + # pandas/core/internals/managers.py:869: error: Argument 1 to + # "astype" of "_ArrayOrScalarCommon" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object]]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, + # Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + arr = arr.astype(dtype, copy=False) # type: ignore[arg-type] else: arr = self._interleave(dtype=dtype, na_value=na_value) # The underlying data was copied within _interleave @@ -899,7 +905,12 @@ def _interleave( elif is_dtype_equal(dtype, str): dtype = "object" - result = np.empty(self.shape, dtype=dtype) + # pandas/core/internals/managers.py:902: error: Argument "dtype" to "empty" has + # incompatible type "Union[ExtensionDtype, str, dtype[Any], Type[object], + # None]"; expected "Union[dtype[Any], None, type, _SupportsDType, str, + # Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], List[Any], + # _DTypeDict, Tuple[Any, Any]]]" [arg-type] + result = np.empty(self.shape, dtype=dtype) # type: ignore[arg-type] itemmask = np.zeros(self.shape[0]) diff --git a/pandas/core/reshape/reshape.py b/pandas/core/reshape/reshape.py index b014262830972..647047256cd45 100644 --- a/pandas/core/reshape/reshape.py +++ b/pandas/core/reshape/reshape.py @@ -933,7 +933,10 @@ def _get_dummies_1d( if dtype is None: dtype = np.uint8 - dtype = np.dtype(dtype) + # pandas/core/reshape/reshape.py:936: error: Argument 1 to "dtype" has incompatible + # type "Union[ExtensionDtype, str, dtype[Any], Type[object]]"; expected "Type[Any]" + # [arg-type] + dtype = np.dtype(dtype) # type: ignore[arg-type] if is_object_dtype(dtype): raise ValueError("dtype=object is not a valid dtype for get_dummies") diff --git a/pandas/core/series.py b/pandas/core/series.py index 99dff8896295c..424998eb5eb08 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -339,7 +339,17 @@ def __init__( elif copy: data = data.copy() else: - data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True) + # pandas/core/series.py:342: error: Argument 3 to "sanitize_array" has + # incompatible type "Union[ExtensionDtype, str, dtype[Any], + # Type[object], None]"; expected "Union[dtype[Any], ExtensionDtype, + # None]" [arg-type] + data = sanitize_array( + data, + index, + dtype, # type: ignore[arg-type] + copy, + 
raise_cast_failure=True, + ) data = SingleBlockManager.from_array(data, index) diff --git a/setup.cfg b/setup.cfg index 55acc414b921d..a117c442a587a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -172,3 +172,6 @@ ignore_errors=True [mypy-pandas.tests.series.apply.test_series_apply] ignore_errors=True + +[mypy-pandas.tests.test_expressions] +ignore_errors=True From be7b8b9f71e4aea7289333ec6a0a21ee8a353e7c Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sun, 10 Jan 2021 11:30:57 +0000 Subject: [PATCH 48/86] update ignores --- pandas/_testing/__init__.py | 18 +++--------------- pandas/core/apply.py | 7 ++++++- pandas/core/arrays/period.py | 16 +++------------- pandas/core/indexes/multi.py | 5 ++++- pandas/core/reshape/merge.py | 6 ++---- pandas/core/window/expanding.py | 9 ++++++--- 6 files changed, 24 insertions(+), 37 deletions(-) diff --git a/pandas/_testing/__init__.py b/pandas/_testing/__init__.py index 5ac5b50c77391..3df1999ce7dce 100644 --- a/pandas/_testing/__init__.py +++ b/pandas/_testing/__init__.py @@ -119,21 +119,9 @@ + STRING_DTYPES + DATETIME64_DTYPES + TIMEDELTA64_DTYPES - # pandas/_testing/__init__.py:122: error: Unsupported operand types for + - # ("List[Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], - # Type[int], Type[complex], Type[bool], Type[object]]]" and "List[object]") - # [operator] - + BOOL_DTYPES # type: ignore[operator] - # pandas/_testing/__init__.py:123: error: Unsupported operand types for + - # ("List[Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], - # Type[int], Type[complex], Type[bool], Type[object]]]" and "List[object]") - # [operator] - + OBJECT_DTYPES # type: ignore[operator] - # pandas/_testing/__init__.py:124: error: Unsupported operand types for + - # ("List[Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], - # Type[int], Type[complex], Type[bool], Type[object]]]" and "List[object]") - # [operator] - + BYTES_DTYPES # type: ignore[operator] + + BOOL_DTYPES + + OBJECT_DTYPES + + BYTES_DTYPES ) NULL_OBJECTS = [None, np.nan, pd.NaT, float("nan"), pd.NA] diff --git a/pandas/core/apply.py b/pandas/core/apply.py index 874b40f224a26..0221f7212d86f 100644 --- a/pandas/core/apply.py +++ b/pandas/core/apply.py @@ -619,7 +619,12 @@ def apply_standard(self) -> FrameOrSeriesUnion: with np.errstate(all="ignore"): if isinstance(f, np.ufunc): - return f(obj) + # pandas/core/apply.py:622: error: Argument 1 to "__call__" of "ufunc" + # has incompatible type "Series"; expected "Union[Union[int, float, + # complex, str, bytes, generic], Sequence[Union[int, float, complex, + # str, bytes, generic]], Sequence[Sequence[Any]], _SupportsArray]" + # [arg-type] + return f(obj) # type: ignore[arg-type] # row-wise access if is_extension_array_dtype(obj.dtype) and hasattr(obj._values, "map"): diff --git a/pandas/core/arrays/period.py b/pandas/core/arrays/period.py index c240c60725dd8..c1a65b28d69b7 100644 --- a/pandas/core/arrays/period.py +++ b/pandas/core/arrays/period.py @@ -270,24 +270,14 @@ def _generate_range(cls, start, end, periods, freq, fields): # ----------------------------------------------------------------- # DatetimeLike Interface - # pandas\core\arrays\period.py:264: error: Return type "int" of - # "_unbox_scalar" incompatible with return type - # "Union[signedinteger[_64Bit], datetime64, timedelta64]" in supertype - # "DatetimeLikeArrayMixin" [override] - def _unbox_scalar( # type: ignore[override] + def _unbox_scalar( self, value: Union[Period, NaTType], setitem: bool = False ) -> np.int64: if value 
is NaT: - # pandas\core\arrays\period.py:268: error: Incompatible return - # value type (got "signedinteger[_64Bit]", expected "int") - # [return-value] - return np.int64(value.value) # type: ignore[return-value] + return np.int64(value.value) elif isinstance(value, self._scalar_type): self._check_compatible_with(value, setitem=setitem) - # pandas\core\arrays\period.py:271: error: Incompatible return - # value type (got "signedinteger[_64Bit]", expected "int") - # [return-value] - return np.int64(value.ordinal) # type: ignore[return-value] + return np.int64(value.ordinal) else: raise ValueError(f"'value' should be a Period. Got '{value}' instead.") diff --git a/pandas/core/indexes/multi.py b/pandas/core/indexes/multi.py index 87d52c0b5c4cd..03faa32cb2041 100644 --- a/pandas/core/indexes/multi.py +++ b/pandas/core/indexes/multi.py @@ -530,7 +530,10 @@ def from_tuples( elif isinstance(tuples, list): arrays = list(lib.to_object_array_tuples(tuples).T) else: - arrays = zip(*tuples) + # pandas/core/indexes/multi.py:533: error: Incompatible types in assignment + # (expression has type "Iterator[Any]", variable has type + # "List[Sequence[Optional[Hashable]]]") [assignment] + arrays = zip(*tuples) # type: ignore[assignment] return cls.from_arrays(arrays, sortorder=sortorder, names=names) diff --git a/pandas/core/reshape/merge.py b/pandas/core/reshape/merge.py index bc317b6d3d547..e48d8c5424bb1 100644 --- a/pandas/core/reshape/merge.py +++ b/pandas/core/reshape/merge.py @@ -1422,8 +1422,7 @@ def get_join_indexers( for n in range(len(left_keys)) ) zipped = zip(*mapped) - # error: No overload variant of "list" matches argument type "object" - llab, rlab, shape = [list(x) for x in zipped] # type: ignore[call-overload] + llab, rlab, shape = [list(x) for x in zipped] # get flat i8 keys from label lists lkey, rkey = _get_join_keys(llab, rlab, shape, sort) @@ -1913,8 +1912,7 @@ def _get_multiindex_indexer(join_keys, index: MultiIndex, sort: bool): for n in range(index.nlevels) ) zipped = zip(*mapped) - # error: No overload variant of "list" matches argument type "object" - rcodes, lcodes, shape = [list(x) for x in zipped] # type: ignore[call-overload] + rcodes, lcodes, shape = [list(x) for x in zipped] if sort: rcodes = list(map(np.take, rcodes, index.codes)) else: diff --git a/pandas/core/window/expanding.py b/pandas/core/window/expanding.py index e50e6c14f91f2..3e4cfb5fa1e54 100644 --- a/pandas/core/window/expanding.py +++ b/pandas/core/window/expanding.py @@ -106,10 +106,13 @@ def _get_cov_corr_window( # assignment (expression has type "int", variable has type # "Union[ndarray, FrameOrSeries, None]") [assignment] other = self.min_periods or -1 # type: ignore[assignment] - # pandas\core\window\expanding.py:92: error: Incompatible return value - # type (got "Union[int, ndarray, FrameOrSeries, None]", expected "int") + # pandas/core/window/expanding.py:109: error: Value of type variable "_LT" of + # "max" cannot be "Union[int, ndarray, FrameOrSeries, None]" [type-var] + + # pandas/core/window/expanding.py:109: error: Incompatible return value type + # (got "Union[int, ndarray, FrameOrSeries, None]", expected "int") # [return-value] - return max(length, other) # type: ignore[return-value] + return max(length, other) # type: ignore[type-var,return-value] _agg_see_also_doc = dedent( """ From 51c75706a51d96dc37d5eb641ef8a6c479c486b8 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 21 Jan 2021 20:53:10 +0000 Subject: [PATCH 49/86] update ignores --- pandas/core/algorithms.py | 28 +++++++++++--- 
pandas/core/arrays/_ranges.py | 10 ++++- pandas/core/arrays/base.py | 8 +++- pandas/core/arrays/masked.py | 8 +++- pandas/core/describe.py | 22 ++++++++--- pandas/core/dtypes/cast.py | 14 +++++-- pandas/core/internals/array_manager.py | 52 ++++++++++++++++++++++---- pandas/core/internals/blocks.py | 15 ++++++-- 8 files changed, 128 insertions(+), 29 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index e056e45794c22..2fe1e8ef89036 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -155,7 +155,10 @@ def _ensure_data( elif is_timedelta64_dtype(values.dtype) or is_timedelta64_dtype(dtype): from pandas import TimedeltaIndex - values = TimedeltaIndex(values)._data + # pandas/core/algorithms.py:158: error: Incompatible types in assignment + # (expression has type "TimedeltaArray", variable has type "ndarray") + # [assignment] + values = TimedeltaIndex(values)._data # type: ignore[assignment] else: # Datetime if values.ndim > 1 and is_datetime64_ns_dtype(values.dtype): @@ -171,9 +174,14 @@ def _ensure_data( from pandas import DatetimeIndex - values = DatetimeIndex(values)._data + # pandas/core/algorithms.py:174: error: Incompatible types in assignment + # (expression has type "DatetimeArray", variable has type "ndarray") + # [assignment] + values = DatetimeIndex(values)._data # type: ignore[assignment] dtype = values.dtype - return values.asi8, dtype + # pandas/core/algorithms.py:176: error: Item "ndarray" of "Union[PeriodArray, + # Any, ndarray]" has no attribute "asi8" [union-attr] + return values.asi8, dtype # type: ignore[union-attr] elif is_categorical_dtype(values.dtype) and ( is_categorical_dtype(dtype) or dtype is None @@ -503,9 +511,19 @@ def isin(comps: AnyArrayLike, values: AnyArrayLike) -> np.ndarray: values = extract_array(values, extract_numpy=True) # type: ignore[assignment] comps = _ensure_arraylike(comps) - comps = extract_array(comps, extract_numpy=True) + # pandas/core/algorithms.py:506: error: Incompatible types in assignment (expression + # has type "Union[Any, ExtensionArray]", variable has type "Index") [assignment] + + # pandas/core/algorithms.py:506: error: Incompatible types in assignment (expression + # has type "Union[Any, ExtensionArray]", variable has type "Series") [assignment] + comps = extract_array(comps, extract_numpy=True) # type: ignore[assignment] if is_extension_array_dtype(comps.dtype): - return comps.isin(values) + # pandas/core/algorithms.py:508: error: Incompatible return value type (got + # "Series", expected "ndarray") [return-value] + + # pandas/core/algorithms.py:508: error: Item "ndarray" of "Union[Any, ndarray]" + # has no attribute "isin" [union-attr] + return comps.isin(values) # type: ignore[return-value,union-attr] elif needs_i8_conversion(comps.dtype): # Dispatch to DatetimeLikeArrayMixin.isin diff --git a/pandas/core/arrays/_ranges.py b/pandas/core/arrays/_ranges.py index 8958026214302..8cc27ee041f40 100644 --- a/pandas/core/arrays/_ranges.py +++ b/pandas/core/arrays/_ranges.py @@ -161,7 +161,9 @@ def _generate_range_overflow_safe_signed( # Putting this into a DatetimeArray/TimedeltaArray # would incorrectly be interpreted as NaT raise OverflowError - return result + # pandas/core/arrays/_ranges.py:164: error: Incompatible return value type + # (got "signedinteger[_64Bit]", expected "int") [return-value] + return result # type: ignore[return-value] except (FloatingPointError, OverflowError): # with endpoint negative and addend positive we risk # FloatingPointError; with reversed signed we 
risk OverflowError @@ -175,7 +177,11 @@ def _generate_range_overflow_safe_signed( # watch out for very special case in which we just slightly # exceed implementation bounds, but when passing the result to # np.arange will get a result slightly within the bounds - result = np.uint64(endpoint) + np.uint64(addend) + + # pandas/core/arrays/_ranges.py:178: error: Incompatible types in assignment + # (expression has type "unsignedinteger[_64Bit]", variable has type + # "signedinteger[_64Bit]") [assignment] + result = np.uint64(endpoint) + np.uint64(addend) # type: ignore[assignment] i64max = np.uint64(np.iinfo(np.int64).max) assert result > i64max if result <= i64max + np.uint64(stride): diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py index 33a8f640556d5..006b564f398d0 100644 --- a/pandas/core/arrays/base.py +++ b/pandas/core/arrays/base.py @@ -629,7 +629,9 @@ def argmin(self, skipna: bool = True) -> int: ExtensionArray.argmax """ validate_bool_kwarg(skipna, "skipna") - if not skipna and self.isna().any(): + # pandas/core/arrays/base.py:632: error: "ExtensionArray" has no attribute "any" + # [attr-defined] + if not skipna and self.isna().any(): # type: ignore[attr-defined] raise NotImplementedError return nargminmax(self, "argmin") @@ -653,7 +655,9 @@ def argmax(self, skipna: bool = True) -> int: ExtensionArray.argmin """ validate_bool_kwarg(skipna, "skipna") - if not skipna and self.isna().any(): + # pandas/core/arrays/base.py:656: error: "ExtensionArray" has no attribute "any" + # [attr-defined] + if not skipna and self.isna().any(): # type: ignore[attr-defined] raise NotImplementedError return nargminmax(self, "argmax") diff --git a/pandas/core/arrays/masked.py b/pandas/core/arrays/masked.py index b4250725f1658..a1b6ad4063f21 100644 --- a/pandas/core/arrays/masked.py +++ b/pandas/core/arrays/masked.py @@ -353,7 +353,9 @@ def take( return type(self)(result, mask, copy=False) - def isin(self, values) -> BooleanArray: + # pandas/core/arrays/masked.py:356: error: Return type "BooleanArray" of "isin" + # incompatible with return type "ndarray" in supertype "ExtensionArray" [override] + def isin(self, values) -> BooleanArray: # type: ignore[override] from pandas.core.arrays import BooleanArray @@ -363,7 +365,9 @@ def isin(self, values) -> BooleanArray: result += self._mask else: result *= np.invert(self._mask) - mask = np.zeros_like(self, dtype=bool) + # pandas/core/arrays/masked.py:366: error: No overload variant of "zeros_like" + # matches argument types "BaseMaskedArray", "Type[bool]" [call-overload] + mask = np.zeros_like(self, dtype=bool) # type: ignore[call-overload] return BooleanArray(result, mask, copy=False) def copy(self: BaseMaskedArrayT) -> BaseMaskedArrayT: diff --git a/pandas/core/describe.py b/pandas/core/describe.py index dcafb3c3a8be5..adfe4ccab7e5b 100644 --- a/pandas/core/describe.py +++ b/pandas/core/describe.py @@ -174,7 +174,9 @@ def _select_data(self): # when some numerics are found, keep only numerics default_include = [np.number] if self.datetime_is_numeric: - default_include.append("datetime") + # pandas/core/describe.py:177: error: Argument 1 to "append" of "list" + # has incompatible type "str"; expected "Type[number[Any]]" [arg-type] + default_include.append("datetime") # type: ignore[arg-type] data = self.obj.select_dtypes(include=default_include) if len(data.columns) == 0: data = self.obj @@ -214,7 +216,10 @@ def describe_numeric_1d(series: Series, percentiles: Sequence[float]) -> Series: """ from pandas import Series - formatted_percentiles = 
format_percentiles(percentiles) + # pandas/core/describe.py:217: error: Argument 1 to "format_percentiles" has + # incompatible type "Sequence[float]"; expected "Union[ndarray, List[Union[int, + # float]], List[float], List[Union[str, float]]]" [arg-type] + formatted_percentiles = format_percentiles(percentiles) # type: ignore[arg-type] stat_index = ["count", "mean", "std", "min"] + formatted_percentiles + ["max"] d = ( @@ -318,7 +323,10 @@ def describe_timestamp_1d(data: Series, percentiles: Sequence[float]) -> Series: # GH-30164 from pandas import Series - formatted_percentiles = format_percentiles(percentiles) + # pandas/core/describe.py:321: error: Argument 1 to "format_percentiles" has + # incompatible type "Sequence[float]"; expected "Union[ndarray, List[Union[int, + # float]], List[float], List[Union[str, float]]]" [arg-type] + formatted_percentiles = format_percentiles(percentiles) # type: ignore[arg-type] stat_index = ["count", "mean", "min"] + formatted_percentiles + ["max"] d = ( @@ -374,7 +382,9 @@ def refine_percentiles(percentiles: Optional[Sequence[float]]) -> Sequence[float The percentiles to include in the output. """ if percentiles is None: - return np.array([0.25, 0.5, 0.75]) + # pandas/core/describe.py:377: error: Incompatible return value type (got + # "ndarray", expected "Sequence[float]") [return-value] + return np.array([0.25, 0.5, 0.75]) # type: ignore[return-value] # explicit conversion of `percentiles` to list percentiles = list(percentiles) @@ -386,7 +396,9 @@ def refine_percentiles(percentiles: Optional[Sequence[float]]) -> Sequence[float if 0.5 not in percentiles: percentiles.append(0.5) - percentiles = np.asarray(percentiles) + # pandas/core/describe.py:389: error: Incompatible types in assignment (expression + # has type "ndarray", variable has type "Optional[Sequence[float]]") [assignment] + percentiles = np.asarray(percentiles) # type: ignore[assignment] # sort and check for duplicates unique_pcts = np.unique(percentiles) diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index 0b4a3c988f716..b2101901f4daf 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -1027,7 +1027,9 @@ def astype_dt64_to_dt64tz( stacklevel=level, ) - return values.tz_localize(dtype.tz) + # pandas/core/dtypes/cast.py:1030: error: "ndarray" has no attribute + # "tz_localize" [attr-defined] + return values.tz_localize(dtype.tz) # type: ignore[attr-defined] elif aware: # GH#18951: datetime64_tz dtype but not equal means different tz @@ -1039,7 +1041,9 @@ def astype_dt64_to_dt64tz( result = result.copy() return result - elif values.tz is not None: + # pandas/core/dtypes/cast.py:1042: error: "ndarray" has no attribute "tz" + # [attr-defined] + elif values.tz is not None: # type: ignore[attr-defined] level = find_stack_level() warnings.warn( "Using .astype to convert from timezone-aware dtype to " @@ -1050,7 +1054,11 @@ def astype_dt64_to_dt64tz( stacklevel=level, ) - result = values.tz_convert("UTC").tz_localize(None) + # pandas/core/dtypes/cast.py:1053: error: "ndarray" has no attribute + # "tz_convert" [attr-defined] + result = values.tz_convert("UTC").tz_localize( # type: ignore[attr-defined] + None + ) if copy: result = result.copy() return result diff --git a/pandas/core/internals/array_manager.py b/pandas/core/internals/array_manager.py index 6d6f9e8f4d3e8..8402482d1c724 100644 --- a/pandas/core/internals/array_manager.py +++ b/pandas/core/internals/array_manager.py @@ -183,7 +183,16 @@ def reduce( res_arrays.append(np.array([res])) index = 
Index([None]) # placeholder - new_mgr = type(self)(res_arrays, [index, self.items]) + # pandas/core/internals/array_manager.py:186: error: Argument 1 to + # "ArrayManager" has incompatible type "List[ndarray]"; expected + # "List[Union[ndarray, ExtensionArray]]" [arg-type] + + # pandas/core/internals/array_manager.py:186: note: "List" is invariant -- see + # http://mypy.readthedocs.io/en/latest/common_issues.html#variance + + # pandas/core/internals/array_manager.py:186: note: Consider using "Sequence" + # instead, which is covariant + new_mgr = type(self)(res_arrays, [index, self.items]) # type: ignore[arg-type] indexer = np.arange(self.shape[0]) return new_mgr, indexer @@ -275,7 +284,16 @@ def apply( if len(result_arrays) == 0: return self.make_empty(new_axes) - return type(self)(result_arrays, new_axes) + # pandas/core/internals/array_manager.py:278: error: Argument 1 to + # "ArrayManager" has incompatible type "List[ndarray]"; expected + # "List[Union[ndarray, ExtensionArray]]" [arg-type] + + # pandas/core/internals/array_manager.py:278: note: "List" is invariant -- see + # http://mypy.readthedocs.io/en/latest/common_issues.html#variance + + # pandas/core/internals/array_manager.py:278: note: Consider using "Sequence" + # instead, which is covariant + return type(self)(result_arrays, new_axes) # type: ignore[arg-type] def apply_with_block(self: T, f, align_keys=None, **kwargs) -> T: @@ -577,7 +595,10 @@ def as_array( result = np.empty(self.shape_proper, dtype=dtype) - for i, arr in enumerate(self.arrays): + # pandas/core/internals/array_manager.py:580: error: Incompatible types in + # assignment (expression has type "Union[ndarray, ExtensionArray]", variable has + # type "ndarray") [assignment] + for i, arr in enumerate(self.arrays): # type: ignore[assignment] arr = arr.astype(dtype, copy=copy) result[:, i] = arr @@ -628,7 +649,9 @@ def fast_xs(self, loc: int) -> ArrayLike: result = np.array([arr[loc] for arr in self.arrays], dtype=temp_dtype) if isinstance(dtype, ExtensionDtype): result = dtype.construct_array_type()._from_sequence(result, dtype=dtype) - return result + # pandas/core/internals/array_manager.py:631: error: Incompatible return value + # type (got "ndarray", expected "ExtensionArray") [return-value] + return result # type: ignore[return-value] def iget(self, i: int) -> SingleBlockManager: """ @@ -645,7 +668,14 @@ def iget_values(self, i: int) -> ArrayLike: """ Return the data for column i as the values (ndarray or ExtensionArray). 
""" - return self.arrays[i] + # pandas/core/internals/array_manager.py:648: error: Incompatible return value + # type (got "Union[ndarray, ExtensionArray]", expected "ExtensionArray") + # [return-value] + + # pandas/core/internals/array_manager.py:648: error: Incompatible return value + # type (got "Union[ndarray, ExtensionArray]", expected "ndarray") + # [return-value] + return self.arrays[i] # type: ignore[return-value] def idelete(self, indexer): """ @@ -672,7 +702,10 @@ def iset(self, loc: Union[int, slice, np.ndarray], value): # value = np.asarray(value) # assert isinstance(value, np.ndarray) assert len(value) == len(self._axes[0]) - self.arrays[loc] = value + # pandas/core/internals/array_manager.py:675: error: Invalid index type + # "Union[int, slice, ndarray]" for "List[Union[ndarray, ExtensionArray]]"; + # expected type "int" [index] + self.arrays[loc] = value # type: ignore[index] return # TODO @@ -825,7 +858,12 @@ def _make_na_array(self, fill_value=None): fill_value = np.nan dtype, fill_value = infer_dtype_from_scalar(fill_value) - values = np.empty(self.shape_proper[0], dtype=dtype) + # pandas/core/internals/array_manager.py:828: error: Argument "dtype" to "empty" + # has incompatible type "Union[dtype[Any], ExtensionDtype]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" [arg-type] + values = np.empty(self.shape_proper[0], dtype=dtype) # type: ignore[arg-type] values.fill(fill_value) return values diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 9ddadc1bfe629..c1f6b6b75792f 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -997,7 +997,10 @@ def setitem(self, indexer, value): # length checking check_setitem_lengths(indexer, value, values) - exact_match = is_exact_shape_match(values, arr_value) + # pandas/core/internals/blocks.py:1000: error: Value of type variable + # "ArrayLike" of "is_exact_shape_match" cannot be "Union[Any, ndarray, + # ExtensionArray]" [type-var] + exact_match = is_exact_shape_match(values, arr_value) # type: ignore[type-var] if is_empty_indexer(indexer, arr_value): # GH#8669 empty indexers pass @@ -1016,7 +1019,10 @@ def setitem(self, indexer, value): if values.shape[-1] != 1: # shouldn't get here (at least until 2D EAs) raise NotImplementedError - values = values[:, 0] + # pandas/core/internals/blocks.py:1019: error: Invalid index type + # "Tuple[slice, int]" for "Union[ndarray, ExtensionArray]"; expected + # type "Union[int, slice, ndarray]" [index] + values = values[:, 0] # type: ignore[index] return self.make_block(Categorical(values, dtype=arr_value.dtype)) elif exact_match and is_ea_value: @@ -1087,7 +1093,10 @@ def putmask(self, mask, new) -> List[Block]: if transpose: new_values = new_values.T - putmask_without_repeat(new_values, mask, new) + # pandas/core/internals/blocks.py:1090: error: Argument 1 to + # "putmask_without_repeat" has incompatible type "Union[ndarray, + # ExtensionArray]"; expected "ndarray" [arg-type] + putmask_without_repeat(new_values, mask, new) # type: ignore[arg-type] return [self] elif not mask.any(): From 37001d5393d58b260a1336025a6702cc15e8bb73 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 21 Jan 2021 20:57:16 +0000 Subject: [PATCH 50/86] update setup.cfg --- setup.cfg | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/setup.cfg b/setup.cfg index a117c442a587a..2f513d73e7afe 100644 --- 
a/setup.cfg +++ b/setup.cfg @@ -137,6 +137,9 @@ check_untyped_defs=False [mypy-pandas.io.clipboard] check_untyped_defs=False +[mypy-pandas.tests.apply.test_series_apply] +ignore_errors=True + [mypy-pandas.tests.arithmetic.conftest] ignore_errors=True @@ -149,9 +152,6 @@ ignore_errors=True [mypy-pandas.tests.frame.methods.test_to_records] ignore_errors=True -[mypy-pandas.tests.frame.test_constructors] -ignore_errors=True - [mypy-pandas.tests.groupby.test_rank] ignore_errors=True @@ -170,8 +170,5 @@ ignore_errors=True [mypy-pandas.tests.reductions.test_reductions] ignore_errors=True -[mypy-pandas.tests.series.apply.test_series_apply] -ignore_errors=True - [mypy-pandas.tests.test_expressions] ignore_errors=True From 79471543073d120563d13a10781dba6c0e64081e Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Mon, 1 Feb 2021 18:28:46 +0000 Subject: [PATCH 51/86] update error messages --- pandas/core/array_algos/putmask.py | 7 ++++++- pandas/core/arrays/_mixins.py | 4 +++- pandas/core/construction.py | 12 +++++++++++- pandas/core/dtypes/cast.py | 10 +++++++++- pandas/core/dtypes/common.py | 15 ++++++++++++--- pandas/core/indexes/datetimelike.py | 8 ++++++-- pandas/core/internals/blocks.py | 8 +++++++- pandas/core/internals/concat.py | 11 +++++++++-- pandas/io/parsers/base_parser.py | 28 +++++++++++++++++++++++++--- pandas/io/pytables.py | 7 ++++++- 10 files changed, 94 insertions(+), 16 deletions(-) diff --git a/pandas/core/array_algos/putmask.py b/pandas/core/array_algos/putmask.py index ca83692ad7ca4..81d413f3b86d4 100644 --- a/pandas/core/array_algos/putmask.py +++ b/pandas/core/array_algos/putmask.py @@ -107,7 +107,12 @@ def putmask_smart(values: np.ndarray, mask: np.ndarray, new) -> np.ndarray: return _putmask_preserve(values, new, mask) dtype = find_common_type([values.dtype, new.dtype]) - values = values.astype(dtype) + # pandas/core/array_algos/putmask.py:110: error: Argument 1 to "astype" of + # "_ArrayOrScalarCommon" has incompatible type "Union[dtype[Any], ExtensionDtype]"; + # expected "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, + # int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" [arg-type] + values = values.astype(dtype) # type: ignore[arg-type] return _putmask_preserve(values, new, mask) diff --git a/pandas/core/arrays/_mixins.py b/pandas/core/arrays/_mixins.py index 89ccbcf281857..f886b95c82d7e 100644 --- a/pandas/core/arrays/_mixins.py +++ b/pandas/core/arrays/_mixins.py @@ -406,7 +406,9 @@ def value_counts(self, dropna: bool = False): from pandas import Index, Series if dropna: - values = self[~self.isna()]._ndarray + # pandas/core/arrays/_mixins.py:409: error: Unsupported operand type for ~ + # ("ExtensionArray") [operator] + values = self[~self.isna()]._ndarray # type: ignore[operator] else: values = self._ndarray diff --git a/pandas/core/construction.py b/pandas/core/construction.py index 33fae27d3d079..68e249bb54c50 100644 --- a/pandas/core/construction.py +++ b/pandas/core/construction.py @@ -550,7 +550,17 @@ def _sanitize_ndim( raise ValueError("Data must be 1-dimensional") if is_object_dtype(dtype) and isinstance(dtype, ExtensionDtype): # i.e. 
PandasDtype("O") - result = com.asarray_tuplesafe(data, dtype=object) + + # pandas/core/construction.py:553: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "ExtensionArray") + # [assignment] + + # pandas/core/construction.py:553: error: Argument "dtype" to + # "asarray_tuplesafe" has incompatible type "Type[object]"; expected + # "Union[str, dtype[Any], None]" [arg-type] + result = com.asarray_tuplesafe( # type: ignore[assignment] + data, dtype=object # type: ignore[arg-type] + ) cls = dtype.construct_array_type() result = cls._from_sequence(result, dtype=dtype) else: diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index 9c25e43691792..b6da3fe73152b 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -946,7 +946,15 @@ def invalidate_string_dtypes(dtype_set: Set[DtypeObj]): Change string like dtypes to object for ``DataFrame.select_dtypes()``. """ - non_string_dtypes = dtype_set - {np.dtype("S").type, np.dtype(" has incompatible type + # "Type[generic]"; expected "Union[dtype[Any], ExtensionDtype, None]" [arg-type] + + # pandas/core/dtypes/cast.py:949: error: Argument 2 to has incompatible type + # "Type[generic]"; expected "Union[dtype[Any], ExtensionDtype, None]" [arg-type] + non_string_dtypes = dtype_set - { + np.dtype("S").type, # type: ignore[arg-type] + np.dtype(" DtypeObj: """ if isinstance(dtype, type) and issubclass(dtype, np.generic): # Type object from a dtype - return dtype + + # pandas/core/dtypes/common.py:1704: error: Incompatible return value type (got + # "Type[generic]", expected "Union[dtype[Any], ExtensionDtype]") [return-value] + return dtype # type: ignore[return-value] elif isinstance(dtype, (np.dtype, ExtensionDtype)): # dtype object try: @@ -1709,7 +1712,10 @@ def infer_dtype_from_object(dtype) -> DtypeObj: except TypeError: # Should still pass if we don't have a date-like pass - return dtype.type + # pandas/core/dtypes/common.py:1712: error: Incompatible return value type (got + # "Union[Type[generic], Type[Any]]", expected "Union[dtype[Any], + # ExtensionDtype]") [return-value] + return dtype.type # type: ignore[return-value] try: dtype = pandas_dtype(dtype) @@ -1723,7 +1729,10 @@ def infer_dtype_from_object(dtype) -> DtypeObj: # TODO(jreback) # should deprecate these if dtype in ["datetimetz", "datetime64tz"]: - return DatetimeTZDtype.type + # pandas/core/dtypes/common.py:1726: error: Incompatible return value type + # (got "Type[Any]", expected "Union[dtype[Any], ExtensionDtype]") + # [return-value] + return DatetimeTZDtype.type # type: ignore[return-value] elif dtype in ["period"]: raise NotImplementedError diff --git a/pandas/core/indexes/datetimelike.py b/pandas/core/indexes/datetimelike.py index c3627b16bd456..3a02b0fce04d7 100644 --- a/pandas/core/indexes/datetimelike.py +++ b/pandas/core/indexes/datetimelike.py @@ -795,7 +795,9 @@ def _fast_union(self: _T, other: _T, sort=None) -> _T: left, right = self, other left_start = left[0] loc = right.searchsorted(left_start, side="left") - right_chunk = right._values[:loc] + # pandas/core/indexes/datetimelike.py:798: error: Slice index must be an + # integer or None [misc] + right_chunk = right._values[:loc] # type: ignore[misc] dates = concat_compat((left._values, right_chunk)) # With sort being False, we can't infer that result.freq == self.freq # TODO: no tests rely on the _with_freq("infer"); needed? 
@@ -810,7 +812,9 @@ def _fast_union(self: _T, other: _T, sort=None) -> _T: # concatenate if left_end < right_end: loc = right.searchsorted(left_end, side="right") - right_chunk = right._values[loc:] + # pandas/core/indexes/datetimelike.py:813: error: Slice index must be an + # integer or None [misc] + right_chunk = right._values[loc:] # type: ignore[misc] dates = concat_compat([left._values, right_chunk]) # The can_fast_union check ensures that the result.freq # should match self.freq diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 41739668c4759..4c6d6091263f6 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -1378,7 +1378,13 @@ def where(self, other, cond, errors="raise", axis: int = 0) -> List[Block]: return self._maybe_downcast(blocks, "infer") # convert datetime to datetime64, timedelta to timedelta64 - other = convert_scalar_for_putitemlike(other, values.dtype) + + # pandas/core/internals/blocks.py:1381: error: Argument 2 to + # "convert_scalar_for_putitemlike" has incompatible type "Union[dtype[Any], + # ExtensionDtype]"; expected "dtype[Any]" [arg-type] + other = convert_scalar_for_putitemlike( + other, values.dtype # type: ignore[arg-type] + ) # By the time we get here, we should have all Series/Index # args extracted to ndarray diff --git a/pandas/core/internals/concat.py b/pandas/core/internals/concat.py index 94a135794f5b9..3144925a163bc 100644 --- a/pandas/core/internals/concat.py +++ b/pandas/core/internals/concat.py @@ -268,7 +268,9 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike: if self.is_na: blk_dtype = getattr(self.block, "dtype", None) - if blk_dtype == np.dtype(object): + # pandas/core/internals/concat.py:271: error: Value of type variable + # "_DTypeScalar" of "dtype" cannot be "object" [type-var] + if blk_dtype == np.dtype(object): # type: ignore[type-var] # we want to avoid filling with np.nan if we are # using None; we already know that we are all # nulls @@ -282,7 +284,12 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike: if self.block is None: # TODO(EA2D): special case unneeded with 2D EAs i8values = np.full(self.shape[1], fill_value.value) - return DatetimeArray(i8values, dtype=empty_dtype) + # pandas/core/internals/concat.py:285: error: Incompatible + # return value type (got "DatetimeArray", expected "ndarray") + # [return-value] + return DatetimeArray( # type: ignore[return-value] + i8values, dtype=empty_dtype + ) elif is_categorical_dtype(blk_dtype): pass elif is_extension_array_dtype(blk_dtype): diff --git a/pandas/io/parsers/base_parser.py b/pandas/io/parsers/base_parser.py index 0d23addbb5f21..154dfd5c57a63 100644 --- a/pandas/io/parsers/base_parser.py +++ b/pandas/io/parsers/base_parser.py @@ -635,7 +635,10 @@ def _infer_types(self, values, na_values, try_num_bool=True): na_count = 0 if issubclass(values.dtype.type, (np.number, np.bool_)): mask = algorithms.isin(values, list(na_values)) - na_count = mask.sum() + # pandas/io/parsers/base_parser.py:638: error: Incompatible types in + # assignment (expression has type "number[Any]", variable has type "int") + # [assignment] + na_count = mask.sum() # type: ignore[assignment] if na_count > 0: if is_integer_dtype(values): values = values.astype(np.float64) @@ -693,7 +696,11 @@ def _cast_types(self, values, cast_type, column): # TODO: this is for consistency with # c-parser which parses all categories # as strings - values = astype_nansafe(values, str) + + # 
pandas/io/parsers/base_parser.py:696: error: Argument 2 to + # "astype_nansafe" has incompatible type "Type[str]"; expected + # "Union[dtype[Any], ExtensionDtype]" [arg-type] + values = astype_nansafe(values, str) # type: ignore[arg-type] cats = Index(values).unique().dropna() values = Categorical._from_inferred_categories( @@ -889,7 +896,22 @@ def _get_empty_meta( if not is_dict_like(dtype): # if dtype == None, default will be object. default_dtype = dtype or object - dtype = defaultdict(lambda: default_dtype) + # pandas/io/parsers/base_parser.py:892: error: Argument 1 to "defaultdict" + # has incompatible type "Callable[[], Union[ExtensionDtype, str, dtype[Any], + # Type[object], Dict[Hashable, Union[ExtensionDtype, Union[str, dtype[Any]], + # Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]]]]"; expected "Optional[Callable[[], Union[ExtensionDtype, + # str, dtype[Any], Type[object]]]]" [arg-type] + + # pandas/io/parsers/base_parser.py:892: error: Incompatible return value + # type (got "Union[ExtensionDtype, str, dtype[Any], Type[object], + # Dict[Hashable, Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], + # Type[float], Type[int], Type[complex], Type[bool], Type[object]]]]", + # expected "Union[ExtensionDtype, str, dtype[Any], Type[object]]") + # [return-value] + dtype = defaultdict( + lambda: default_dtype # type: ignore[arg-type, return-value] + ) else: dtype = cast(dict, dtype) dtype = defaultdict( diff --git a/pandas/io/pytables.py b/pandas/io/pytables.py index c73ce622f491e..03d476fc57126 100644 --- a/pandas/io/pytables.py +++ b/pandas/io/pytables.py @@ -4959,7 +4959,12 @@ def _maybe_convert_for_string_atom( ) # itemsize is the maximum length of a string (along any dimension) - data_converted = _convert_string_array(data, encoding, errors).reshape(data.shape) + + # error: Argument 1 to "_convert_string_array" has incompatible type "Union[ndarray, + # ExtensionArray]"; expected "ndarray" + data_converted = _convert_string_array( + data, encoding, errors # type: ignore[arg-type] + ).reshape(data.shape) itemsize = data_converted.itemsize # specified min_itemsize? 
From a309f4f4f7844b30377093542a3626a2c524c9f5 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Mon, 1 Feb 2021 18:41:47 +0000 Subject: [PATCH 52/86] update setup.cfg --- pandas/tests/io/parser/common/test_chunksize.py | 6 +++++- setup.cfg | 3 +++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/pandas/tests/io/parser/common/test_chunksize.py b/pandas/tests/io/parser/common/test_chunksize.py index 8c1475025b442..1a0fce9941e96 100644 --- a/pandas/tests/io/parser/common/test_chunksize.py +++ b/pandas/tests/io/parser/common/test_chunksize.py @@ -140,7 +140,11 @@ def test_read_chunksize_jagged_names(all_parsers): parser = all_parsers data = "\n".join(["0"] * 7 + [",".join(["0"] * 10)]) - expected = DataFrame([[0] + [np.nan] * 9] * 7 + [[0] * 10]) + # pandas/tests/io/parser/common/test_chunksize.py:143: error: List item 0 has + # incompatible type "float"; expected "int" [list-item] + expected = DataFrame( + [[0] + [np.nan] * 9] * 7 + [[0] * 10] # type: ignore[list-item] + ) with parser.read_csv(StringIO(data), names=range(10), chunksize=4) as reader: result = concat(reader) tm.assert_frame_equal(result, expected) diff --git a/setup.cfg b/setup.cfg index ff4ed326bcba5..ced711b85c761 100644 --- a/setup.cfg +++ b/setup.cfg @@ -138,6 +138,9 @@ check_untyped_defs=False [mypy-pandas.io.clipboard] check_untyped_defs=False +[mypy-pandas.io.formats.string] +ignore_errors=True + [mypy-pandas.tests.apply.test_series_apply] ignore_errors=True From 6df88fd01c499870c0330101cec07a088a529481 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Mon, 1 Feb 2021 18:45:16 +0000 Subject: [PATCH 53/86] update environment.yml --- environment.yml | 3 +-- requirements-dev.txt | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/environment.yml b/environment.yml index e2ab5a746b78d..cac705bff72ea 100644 --- a/environment.yml +++ b/environment.yml @@ -3,7 +3,7 @@ channels: - conda-forge dependencies: # required - - numpy>=1.16.5, <1.20 # gh-39513 + - numpy>=1.16.5 - python=3 - python-dateutil>=2.7.3 - pytz @@ -113,6 +113,5 @@ dependencies: - tabulate>=0.8.3 # DataFrame.to_markdown - natsort # DataFrame.sort_values - pip: - - numpy==1.20.0rc2 - git+https://github.com/pandas-dev/pydata-sphinx-theme.git@master - git+https://github.com/numpy/numpydoc diff --git a/requirements-dev.txt b/requirements-dev.txt index 07f8b2df1e398..9bae344adc520 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,7 @@ # This file is auto-generated from environment.yml, do not modify. # See that file for comments about the need/usage of each dependency. 
-numpy>=1.16.5, <1.20 +numpy>=1.16.5 python-dateutil>=2.7.3 pytz asv @@ -76,6 +76,5 @@ cftime pyreadstat tabulate>=0.8.3 natsort -numpy==1.20.0rc2 git+https://github.com/pandas-dev/pydata-sphinx-theme.git@master git+https://github.com/numpy/numpydoc From 6abff155ea19de0867a9471202ca39ff11fd6f9c Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Tue, 2 Feb 2021 09:29:45 +0000 Subject: [PATCH 54/86] update error messages --- pandas/core/internals/blocks.py | 24 +++++++++++++++++++----- pandas/core/internals/concat.py | 4 +++- pandas/core/internals/managers.py | 6 +++++- 3 files changed, 27 insertions(+), 7 deletions(-) diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index cb05637b4fc51..23518172e913a 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -249,7 +249,9 @@ def get_values(self, dtype: Optional[DtypeObj] = None) -> np.ndarray: """ if is_object_dtype(dtype): return self.values.astype(object) - return self.values + # pandas/core/internals/blocks.py:252: error: Incompatible return value type + # (got "Union[ndarray, ExtensionArray]", expected "ndarray") [return-value] + return self.values # type: ignore[return-value] def get_block_values_for_json(self) -> np.ndarray: """ @@ -1535,7 +1537,10 @@ def _replace_coerce( nb = self.coerce_to_target_dtype(value) if nb is self and not inplace: nb = nb.copy() - putmask_inplace(nb.values, mask, value) + # pandas/core/internals/blocks.py:1538: error: Value of type variable + # "ArrayLike" of "putmask_inplace" cannot be "Union[ndarray, + # ExtensionArray]" [type-var] + putmask_inplace(nb.values, mask, value) # type: ignore[type-var] return [nb] else: regex = _should_use_regex(regex, to_replace) @@ -1883,7 +1888,11 @@ def where(self, other, cond, errors="raise", axis: int = 0) -> List[Block]: # The default `other` for Series / Frame is np.nan # we want to replace that with the correct NA value # for the type - other = self.dtype.na_value + + # pandas/core/internals/blocks.py:1886: error: Item "dtype[Any]" of + # "Union[dtype[Any], ExtensionDtype]" has no attribute "na_value" + # [union-attr] + other = self.dtype.na_value # type: ignore[union-attr] if is_sparse(self.values): # TODO(SparseArray.__setitem__): remove this if condition @@ -1969,7 +1978,10 @@ class NumericBlock(Block): is_numeric = True def _can_hold_element(self, element: Any) -> bool: - return can_hold_element(self.dtype, element) + # pandas/core/internals/blocks.py:1972: error: Argument 1 to "can_hold_element" + # has incompatible type "Union[dtype[Any], ExtensionDtype]"; expected + # "dtype[Any]" [arg-type] + return can_hold_element(self.dtype, element) # type: ignore[arg-type] @property def _can_hold_na(self): @@ -2036,7 +2048,9 @@ def get_values(self, dtype: Optional[DtypeObj] = None) -> np.ndarray: if is_object_dtype(dtype): # DTA/TDA constructor and astype can handle 2D return self._holder(self.values).astype(object) - return self.values + # pandas/core/internals/blocks.py:2039: error: Incompatible return value type + # (got "Union[ndarray, ExtensionArray]", expected "ndarray") [return-value] + return self.values # type: ignore[return-value] def internal_values(self): # Override to return DatetimeArray and TimedeltaArray diff --git a/pandas/core/internals/concat.py b/pandas/core/internals/concat.py index e0ed71dbfba9b..db83844e11fa9 100644 --- a/pandas/core/internals/concat.py +++ b/pandas/core/internals/concat.py @@ -416,7 +416,9 @@ def _dtype_to_na_value(dtype: DtypeObj, has_none_blocks: bool): Find the NA value to 
go with this dtype. """ if is_extension_array_dtype(dtype): - return dtype.na_value + # pandas/core/internals/concat.py:419: error: Item "dtype[Any]" of + # "Union[dtype[Any], ExtensionDtype]" has no attribute "na_value" [union-attr] + return dtype.na_value # type: ignore[union-attr] elif dtype.kind in ["m", "M"]: return dtype.type("NaT") elif dtype.kind in ["f", "c"]: diff --git a/pandas/core/internals/managers.py b/pandas/core/internals/managers.py index 4fcf9b42bba7a..0effc16b88aee 100644 --- a/pandas/core/internals/managers.py +++ b/pandas/core/internals/managers.py @@ -922,7 +922,11 @@ def _interleave( dtype=dtype, na_value=na_value ) else: - arr = blk.get_values(dtype) + # pandas/core/internals/managers.py:925: error: Argument 1 to + # "get_values" of "Block" has incompatible type "Union[ExtensionDtype, + # str, dtype[Any], Type[object], None]"; expected "Union[dtype[Any], + # ExtensionDtype, None]" [arg-type] + arr = blk.get_values(dtype) # type: ignore[arg-type] result[rl.indexer] = arr itemmask[rl.indexer] = 1 From 49319a84a11d9e8404fd89ee80a25b081699d054 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 13 Feb 2021 11:45:48 +0000 Subject: [PATCH 55/86] update for numpy 1.20.1 --- pandas/compat/numpy/__init__.py | 4 +--- pandas/compat/numpy/function.py | 4 +--- pandas/core/frame.py | 5 +---- pandas/util/_test_decorators.py | 4 +--- 4 files changed, 4 insertions(+), 13 deletions(-) diff --git a/pandas/compat/numpy/__init__.py b/pandas/compat/numpy/__init__.py index 675372cd38ea7..e551f05efa31b 100644 --- a/pandas/compat/numpy/__init__.py +++ b/pandas/compat/numpy/__init__.py @@ -6,9 +6,7 @@ import numpy as np # numpy versioning -# pandas\compat\numpy\__init__.py:9: error: Module has no attribute -# "__version__"; maybe "version"? [attr-defined] -_np_version = np.__version__ # type: ignore[attr-defined] +_np_version = np.__version__ _nlv = LooseVersion(_np_version) np_version_under1p17 = _nlv < LooseVersion("1.17") np_version_under1p18 = _nlv < LooseVersion("1.18") diff --git a/pandas/compat/numpy/function.py b/pandas/compat/numpy/function.py index 03498ab79d31f..c47c31fabeb70 100644 --- a/pandas/compat/numpy/function.py +++ b/pandas/compat/numpy/function.py @@ -18,9 +18,7 @@ from distutils.version import LooseVersion from typing import Any, Dict, Optional, Union -# pandas\compat\numpy\function.py:23: error: Module 'numpy' has no attribute -# '__version__'; maybe "version"? 
[attr-defined] -from numpy import __version__, ndarray # type: ignore[attr-defined] +from numpy import __version__, ndarray from pandas._libs.lib import is_bool, is_integer from pandas.errors import UnsupportedFunctionCall diff --git a/pandas/core/frame.py b/pandas/core/frame.py index b4db2b06fabb1..24691a0744f06 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -2216,10 +2216,7 @@ def to_records( msg = f"Invalid dtype {dtype_mapping} specified for {element} {name}" raise ValueError(msg) - # error: Module has no attribute "fromarrays" - return np.rec.fromarrays( # type: ignore[attr-defined] - arrays, dtype={"names": names, "formats": formats} - ) + return np.rec.fromarrays(arrays, dtype={"names": names, "formats": formats}) @classmethod def _from_arrays( diff --git a/pandas/util/_test_decorators.py b/pandas/util/_test_decorators.py index d7050c7c4e24a..239211d819783 100644 --- a/pandas/util/_test_decorators.py +++ b/pandas/util/_test_decorators.py @@ -201,9 +201,7 @@ def skip_if_np_lt(ver_str: str, *args, reason: Optional[str] = None): if reason is None: reason = f"NumPy {ver_str} or greater required" return pytest.mark.skipif( - # pandas\util\_test_decorators.py:193: error: Module has no attribute - # "__version__"; maybe "version"? [attr-defined] - np.__version__ < LooseVersion(ver_str), # type: ignore[attr-defined] + np.__version__ < LooseVersion(ver_str), *args, reason=reason, ) From 3a87d8cd60e17fbfe7f9b57065aea8c5f3326d5e Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 13 Feb 2021 12:43:09 +0000 Subject: [PATCH 56/86] update ignores --- pandas/core/algorithms.py | 13 +++++-- pandas/core/array_algos/putmask.py | 14 +++++-- pandas/core/arrays/categorical.py | 10 ++++- pandas/core/dtypes/cast.py | 12 +++++- pandas/core/dtypes/missing.py | 4 +- pandas/core/frame.py | 30 ++++++++++++--- pandas/core/groupby/generic.py | 5 ++- pandas/core/indexes/base.py | 11 +++++- pandas/core/indexes/multi.py | 8 +++- pandas/core/internals/array_manager.py | 5 ++- pandas/core/internals/blocks.py | 51 ++++++++++++++++++++++---- pandas/core/internals/concat.py | 31 ++++++++++++++-- pandas/core/internals/managers.py | 8 +++- 13 files changed, 167 insertions(+), 35 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index fcec3ddcccd78..ee0ec61dba0dd 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -1782,8 +1782,13 @@ def _take_preprocess_indexer_and_fill_value( if dtype != arr.dtype and (out is None or out.dtype != dtype): # check if promotion is actually required based on indexer mask = indexer == -1 - needs_masking = mask.any() - mask_info = mask, needs_masking + # pandas/core/algorithms.py:1785: error: Item "bool" of "Union[Any, + # bool]" has no attribute "any" [union-attr] + needs_masking = mask.any() # type: ignore[union-attr] + # pandas/core/algorithms.py:1786: error: Incompatible types in + # assignment (expression has type "Tuple[Union[Any, bool], Any]", + # variable has type "Optional[Tuple[None, bool]]") [assignment] + mask_info = mask, needs_masking # type: ignore[assignment] if needs_masking: if out is not None and out.dtype != dtype: raise TypeError("Incompatible type for fill_value") @@ -1900,7 +1905,9 @@ def take_2d_multi( row_idx = ensure_int64(row_idx) col_idx = ensure_int64(col_idx) - indexer = row_idx, col_idx + # pandas/core/algorithms.py:1903: error: Incompatible types in assignment + # (expression has type "Tuple[Any, Any]", variable has type "ndarray") [assignment] + indexer = row_idx, col_idx # type: 
ignore[assignment] mask_info = None # check for promotion based on types only (do this first because diff --git a/pandas/core/array_algos/putmask.py b/pandas/core/array_algos/putmask.py index 75732f14f3c4e..88ae5a9dd2774 100644 --- a/pandas/core/array_algos/putmask.py +++ b/pandas/core/array_algos/putmask.py @@ -185,10 +185,18 @@ def extract_bool_array(mask: ArrayLike) -> np.ndarray: # We could have BooleanArray, Sparse[bool], ... # Except for BooleanArray, this is equivalent to just # np.asarray(mask, dtype=bool) - mask = mask.to_numpy(dtype=bool, na_value=False) - mask = np.asarray(mask, dtype=bool) - return mask + # pandas/core/array_algos/putmask.py:188: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type "ExtensionArray") + # [assignment] + mask = mask.to_numpy(dtype=bool, na_value=False) # type: ignore[assignment] + + # pandas/core/array_algos/putmask.py:190: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "ExtensionArray") [assignment] + mask = np.asarray(mask, dtype=bool) # type: ignore[assignment] + # pandas/core/array_algos/putmask.py:191: error: Incompatible return value type (got + # "ExtensionArray", expected "ndarray") [return-value] + return mask # type: ignore[return-value] def setitem_datetimelike_compat(values: np.ndarray, num_set: int, other): diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py index 43528aa5ad799..fdc9f8d50dcd8 100644 --- a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -485,7 +485,15 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: # GH8628 (PERF): astype category codes instead of astyping array try: new_cats = np.asarray(self.categories) - new_cats = new_cats.astype(dtype=dtype, copy=copy) + # pandas/core/arrays/categorical.py:488: error: Argument "dtype" to + # "astype" of "_ArrayOrScalarCommon" has incompatible type + # "Union[ExtensionDtype, dtype[Any]]"; expected "Union[dtype[Any], None, + # type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, + # Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" + # [arg-type] + new_cats = new_cats.astype( + dtype=dtype, copy=copy # type: ignore[arg-type] + ) except ( TypeError, # downstream error msg for CategoricalIndex is misleading ValueError, diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index dfe5b4ebd3d15..d04643f75737b 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -496,7 +496,12 @@ def maybe_upcast_putmask(result: np.ndarray, mask: np.ndarray) -> np.ndarray: new_dtype = ensure_dtype_can_hold_na(result.dtype) if new_dtype != result.dtype: - result = result.astype(new_dtype, copy=True) + # pandas/core/dtypes/cast.py:499: error: Argument 1 to "astype" of + # "_ArrayOrScalarCommon" has incompatible type "Union[dtype[Any], + # ExtensionDtype]"; expected "Union[dtype[Any], None, type, _SupportsDType, + # str, Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + result = result.astype(new_dtype, copy=True) # type: ignore[arg-type] np.place(result, mask, np.nan) @@ -550,7 +555,10 @@ def maybe_promote(dtype: np.dtype, fill_value=np.nan): kinds = ["i", "u", "f", "c", "m", "M"] if is_valid_na_for_dtype(fill_value, dtype) and dtype.kind in kinds: - dtype = ensure_dtype_can_hold_na(dtype) + # pandas/core/dtypes/cast.py:553: error: Incompatible types in assignment + # (expression has type "Union[dtype[Any], 
ExtensionDtype]", variable has type + # "dtype[Any]") [assignment] + dtype = ensure_dtype_can_hold_na(dtype) # type: ignore[assignment] fv = na_value_for_dtype(dtype) return dtype, fv diff --git a/pandas/core/dtypes/missing.py b/pandas/core/dtypes/missing.py index 37b6f6489b916..8876a56a395d7 100644 --- a/pandas/core/dtypes/missing.py +++ b/pandas/core/dtypes/missing.py @@ -562,7 +562,9 @@ def na_value_for_dtype(dtype: DtypeObj, compat: bool = True): """ if is_extension_array_dtype(dtype): - return dtype.na_value + # pandas/core/dtypes/missing.py:565: error: Item "dtype[Any]" of + # "Union[dtype[Any], ExtensionDtype]" has no attribute "na_value" [union-attr] + return dtype.na_value # type: ignore[union-attr] elif needs_i8_conversion(dtype): return dtype.type("NaT", "ns") elif is_float_dtype(dtype): diff --git a/pandas/core/frame.py b/pandas/core/frame.py index bcabe648d4468..2e800633c2408 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -3390,7 +3390,10 @@ def _set_item_frame_value(self, key, value: DataFrame) -> None: value = value.reindex(cols, axis=1) # now align rows - value = _reindex_for_setitem(value, self.index) + + # pandas/core/frame.py:3393: error: Incompatible types in assignment (expression + # has type "ExtensionArray", variable has type "DataFrame") [assignment] + value = _reindex_for_setitem(value, self.index) # type: ignore[assignment] self._set_item_mgr(key, value) def _iset_item_mgr(self, loc: int, value) -> None: @@ -3899,9 +3902,17 @@ def check_int_infer_dtype(dtypes): # see https://github.com/numpy/numpy/issues/9464 if (isinstance(dtype, str) and dtype == "int") or (dtype is int): converted_dtypes.append(np.int32) - converted_dtypes.append(np.int64) + # pandas/core/frame.py:3902: error: Argument 1 to "append" of "list" + # has incompatible type "Type[signedinteger[Any]]"; expected + # "Type[signedinteger[Any]]" [arg-type] + converted_dtypes.append(np.int64) # type: ignore[arg-type] else: - converted_dtypes.append(infer_dtype_from_object(dtype)) + # pandas/core/frame.py:3904: error: Argument 1 to "append" of "list" + # has incompatible type "Union[dtype[Any], ExtensionDtype]"; + # expected "Type[signedinteger[Any]]" [arg-type] + converted_dtypes.append( + infer_dtype_from_object(dtype) # type: ignore[arg-type] + ) return frozenset(converted_dtypes) include = check_int_infer_dtype(include) @@ -4261,8 +4272,10 @@ def _reindex_multi(self, axes, copy: bool, fill_value) -> DataFrame: if row_indexer is not None and col_indexer is not None: indexer = row_indexer, col_indexer + # pandas/core/frame.py:4265: error: Argument 2 to "take_2d_multi" has + # incompatible type "Tuple[Any, Any]"; expected "ndarray" [arg-type] new_values = algorithms.take_2d_multi( - self.values, indexer, fill_value=fill_value + self.values, indexer, fill_value=fill_value # type: ignore[arg-type] ) return self._constructor(new_values, index=new_index, columns=new_columns) else: @@ -4947,10 +4960,15 @@ def set_index( arrays.append(col) # type:ignore[arg-type] names.append(col.name) elif isinstance(col, (list, np.ndarray)): - arrays.append(col) + # pandas/core/frame.py:4950: error: Argument 1 to "append" of "list" has + # incompatible type "Union[List[Any], ndarray]"; expected "Index" + # [arg-type] + arrays.append(col) # type: ignore[arg-type] names.append(None) elif isinstance(col, abc.Iterator): - arrays.append(list(col)) + # pandas/core/frame.py:4953: error: Argument 1 to "append" of "list" has + # incompatible type "List[Any]"; expected "Index" [arg-type] + arrays.append(list(col)) # 
type: ignore[arg-type] names.append(None) # from here, col can only be a column label else: diff --git a/pandas/core/groupby/generic.py b/pandas/core/groupby/generic.py index f1aba430b72d0..bfaab9b208959 100644 --- a/pandas/core/groupby/generic.py +++ b/pandas/core/groupby/generic.py @@ -1137,7 +1137,10 @@ def py_fallback(bvalues: ArrayLike) -> ArrayLike: # We've split an object block! Everything we've assumed # about a single block input returning a single block output # is a lie. See eg GH-39329 - return mgr.as_array() + + # pandas/core/groupby/generic.py:1140: error: Incompatible return value + # type (got "ndarray", expected "ExtensionArray") [return-value] + return mgr.as_array() # type: ignore[return-value] else: result = mgr.blocks[0].values return result diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index d0a8fdba26491..8a8698aed5245 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -4592,7 +4592,9 @@ def putmask(self, mask, value): numpy.ndarray.putmask : Changes elements of an array based on conditional and input values. """ - mask, noop = validate_putmask(self._values, mask) + # pandas/core/indexes/base.py:4595: error: Value of type variable "ArrayLike" of + # "validate_putmask" cannot be "Union[ExtensionArray, ndarray]" [type-var] + mask, noop = validate_putmask(self._values, mask) # type: ignore[type-var] if noop: return self.copy() @@ -4608,7 +4610,12 @@ def putmask(self, mask, value): return self.astype(dtype).putmask(mask, value) values = self._values.copy() - converted = setitem_datetimelike_compat(values, mask.sum(), converted) + # pandas/core/indexes/base.py:4611: error: Argument 1 to + # "setitem_datetimelike_compat" has incompatible type "Union[ExtensionArray, + # ndarray]"; expected "ndarray" [arg-type] + converted = setitem_datetimelike_compat( + values, mask.sum(), converted # type: ignore[arg-type] + ) np.putmask(values, mask, converted) return type(self)._simple_new(values, name=self.name) diff --git a/pandas/core/indexes/multi.py b/pandas/core/indexes/multi.py index a2e94d290d1fa..320a90f95346f 100644 --- a/pandas/core/indexes/multi.py +++ b/pandas/core/indexes/multi.py @@ -683,14 +683,18 @@ def _values(self) -> np.ndarray: vals, (ABCDatetimeIndex, ABCTimedeltaIndex) ): vals = vals.astype(object) - vals = np.array(vals, copy=False) + # pandas/core/indexes/multi.py:686: error: Incompatible types in assignment + # (expression has type "ndarray", variable has type "Index") [assignment] + vals = np.array(vals, copy=False) # type: ignore[assignment] values.append(vals) arr = lib.fast_zip(values) return arr @property - def values(self) -> np.ndarray: + # pandas/core/indexes/multi.py:693: error: Return type "ndarray" of "values" + # incompatible with return type "ArrayLike" in supertype "Index" [override] + def values(self) -> np.ndarray: # type: ignore[override] return self._values @property diff --git a/pandas/core/internals/array_manager.py b/pandas/core/internals/array_manager.py index 4c03d786b2742..e4ca5e40fb7f7 100644 --- a/pandas/core/internals/array_manager.py +++ b/pandas/core/internals/array_manager.py @@ -873,7 +873,10 @@ def _equal_values(self, other) -> bool: assuming shape and indexes have already been checked. 
""" for left, right in zip(self.arrays, other.arrays): - if not array_equals(left, right): + # pandas/core/internals/array_manager.py:876: error: Value of type variable + # "ArrayLike" of "array_equals" cannot be "Union[Any, ndarray, + # ExtensionArray]" [type-var] + if not array_equals(left, right): # type: ignore[type-var] return False else: return True diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 54b3e97a6c260..6141e19364ffd 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -432,7 +432,9 @@ def fillna( inplace = validate_bool_kwarg(inplace, "inplace") mask = isna(self.values) - mask, noop = validate_putmask(self.values, mask) + # pandas/core/internals/blocks.py:435: error: Value of type variable "ArrayLike" + # of "validate_putmask" cannot be "Union[ndarray, ExtensionArray]" [type-var] + mask, noop = validate_putmask(self.values, mask) # type: ignore[type-var] if limit is not None: limit = libalgos.validate_limit(None, limit=limit) @@ -882,7 +884,12 @@ def _replace_list( # in order to avoid repeating the same computations mask = ~isna(self.values) masks = [ - compare_or_regex_search(self.values, s[0], regex=regex, mask=mask) + # pandas/core/internals/blocks.py:885: error: Value of type variable + # "ArrayLike" of "compare_or_regex_search" cannot be "Union[ndarray, + # ExtensionArray]" [type-var] + compare_or_regex_search( # type: ignore[type-var] + self.values, s[0], regex=regex, mask=mask + ) for s in pairs ] else: @@ -896,7 +903,10 @@ def _replace_list( for s in pairs ] - masks = [extract_bool_array(x) for x in masks] + # pandas/core/internals/blocks.py:899: error: Value of type variable "ArrayLike" + # of "extract_bool_array" cannot be "Union[ndarray, ExtensionArray, bool]" + # [type-var] + masks = [extract_bool_array(x) for x in masks] # type: ignore[type-var] rb = [self if inplace else self.copy()] for i, (src, dest) in enumerate(pairs): @@ -1025,7 +1035,12 @@ def setitem(self, indexer, value): values[indexer] = value.to_numpy(values.dtype).reshape(-1, 1) else: - value = setitem_datetimelike_compat(values, len(values[indexer]), value) + # pandas/core/internals/blocks.py:1028: error: Argument 1 to + # "setitem_datetimelike_compat" has incompatible type "Union[ndarray, + # ExtensionArray]"; expected "ndarray" [arg-type] + value = setitem_datetimelike_compat( + values, len(values[indexer]), value # type: ignore[arg-type] + ) values[indexer] = value if transpose: @@ -1050,7 +1065,10 @@ def putmask(self, mask, new) -> List[Block]: List[Block] """ transpose = self.ndim == 2 - mask, noop = validate_putmask(self.values.T, mask) + # pandas/core/internals/blocks.py:1053: error: Value of type variable + # "ArrayLike" of "validate_putmask" cannot be "Union[ndarray, ExtensionArray]" + # [type-var] + mask, noop = validate_putmask(self.values.T, mask) # type: ignore[type-var] assert not isinstance(new, (ABCIndex, ABCSeries, ABCDataFrame)) new_values = self.values # delay copy if possible. 
@@ -1351,7 +1369,10 @@ def where(self, other, cond, errors="raise", axis: int = 0) -> List[Block]: if transpose: values = values.T - icond, noop = validate_putmask(values, ~cond) + # pandas/core/internals/blocks.py:1354: error: Value of type variable + # "ArrayLike" of "validate_putmask" cannot be "Union[ndarray, ExtensionArray]" + # [type-var] + icond, noop = validate_putmask(values, ~cond) # type: ignore[type-var] if is_valid_na_for_dtype(other, self.dtype) and not self.is_object: other = self.fill_value @@ -1369,7 +1390,16 @@ def where(self, other, cond, errors="raise", axis: int = 0) -> List[Block]: blocks = block.where(orig_other, cond, errors=errors, axis=axis) return self._maybe_downcast(blocks, "infer") - alt = setitem_datetimelike_compat(values, icond.sum(), other) + # pandas/core/internals/blocks.py:1372: error: Argument 1 to + # "setitem_datetimelike_compat" has incompatible type "Union[ndarray, + # ExtensionArray]"; expected "ndarray" [arg-type] + + # pandas/core/internals/blocks.py:1372: error: Argument 2 to + # "setitem_datetimelike_compat" has incompatible type "number[Any]"; + # expected "int" [arg-type] + alt = setitem_datetimelike_compat( + values, icond.sum(), other # type: ignore[arg-type] + ) if alt is not other: result = values.copy() np.putmask(result, icond, alt) @@ -1460,7 +1490,12 @@ def quantile( values = self.values mask = np.asarray(isna(values)) - result = quantile_with_mask(values, mask, fill_value, qs, interpolation, axis) + # pandas/core/internals/blocks.py:1463: error: Argument 1 to + # "quantile_with_mask" has incompatible type "Union[ndarray, ExtensionArray]"; + # expected "ndarray" [arg-type] + result = quantile_with_mask( + values, mask, fill_value, qs, interpolation, axis # type: ignore[arg-type] + ) return make_block(result, placement=self.mgr_locs, ndim=2) diff --git a/pandas/core/internals/concat.py b/pandas/core/internals/concat.py index 609048a50aefb..3005e8c47f823 100644 --- a/pandas/core/internals/concat.py +++ b/pandas/core/internals/concat.py @@ -300,7 +300,12 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike: ): # TODO(EA2D): special case unneeded with 2D EAs i8values = np.full(self.shape[1], fill_value.value) - return DatetimeArray(i8values, dtype=empty_dtype) + # pandas/core/internals/concat.py:303: error: Incompatible return + # value type (got "DatetimeArray", expected "ndarray") + # [return-value] + return DatetimeArray( # type: ignore[return-value] + i8values, dtype=empty_dtype + ) elif is_categorical_dtype(blk_dtype): pass elif is_extension_array_dtype(blk_dtype): @@ -320,7 +325,15 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike: else: # NB: we should never get here with empty_dtype integer or bool; # if we did, the missing_arr.fill would cast to gibberish - missing_arr = np.empty(self.shape, dtype=empty_dtype) + + # pandas/core/internals/concat.py:323: error: Argument "dtype" to + # "empty" has incompatible type "Union[dtype[Any], ExtensionDtype]"; + # expected "Union[dtype[Any], None, type, _SupportsDType, str, + # Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + missing_arr = np.empty( + self.shape, dtype=empty_dtype # type: ignore[arg-type] + ) missing_arr.fill(fill_value) return missing_arr @@ -386,14 +399,24 @@ def _concatenate_join_units( elif any(isinstance(t, ExtensionArray) for t in to_concat): # concatting with at least one EA means we are concatting a single column # the non-EA values 
are 2D arrays with shape (1, n) - to_concat = [t if isinstance(t, ExtensionArray) else t[0, :] for t in to_concat] + + # pandas/core/internals/concat.py:389: error: Invalid index type "Tuple[int, + # slice]" for "ExtensionArray"; expected type "Union[int, slice, ndarray]" + # [index] + to_concat = [ + t if isinstance(t, ExtensionArray) else t[0, :] # type: ignore[index] + for t in to_concat + ] concat_values = concat_compat(to_concat, axis=0, ea_compat_axis=True) if not is_extension_array_dtype(concat_values.dtype): # if the result of concat is not an EA but an ndarray, reshape to # 2D to put it a non-EA Block # special case DatetimeArray/TimedeltaArray, which *is* an EA, but # is put in a consolidated 2D block - concat_values = np.atleast_2d(concat_values) + + # pandas/core/internals/concat.py:396: error: No overload variant of + # "atleast_2d" matches argument type "ExtensionArray" [call-overload] + concat_values = np.atleast_2d(concat_values) # type: ignore[call-overload] else: concat_values = concat_compat(to_concat, axis=concat_axis) diff --git a/pandas/core/internals/managers.py b/pandas/core/internals/managers.py index cb4e7146ceb6f..32ab84b3bba45 100644 --- a/pandas/core/internals/managers.py +++ b/pandas/core/internals/managers.py @@ -1807,7 +1807,13 @@ def _multi_blockify(tuples, dtype: Optional[Dtype] = None): new_blocks = [] for dtype, tup_block in grouper: - values, placement = _stack_arrays(list(tup_block), dtype) + # pandas/core/internals/managers.py:1810: error: Argument 2 to "_stack_arrays" + # has incompatible type "Union[ExtensionDtype, str, dtype[Any], Type[str], + # Type[float], Type[int], Type[complex], Type[bool], Type[object], None]"; + # expected "dtype[Any]" [arg-type] + values, placement = _stack_arrays( + list(tup_block), dtype # type: ignore[arg-type] + ) block = make_block(values, placement=placement, ndim=2) new_blocks.append(block) From ed5b61ace49764b5da750d83593c040519abece5 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Tue, 16 Feb 2021 10:59:10 +0000 Subject: [PATCH 57/86] update ignores --- pandas/core/dtypes/cast.py | 4 +++- pandas/core/generic.py | 6 +++++- pandas/core/internals/array_manager.py | 5 ++++- pandas/core/internals/blocks.py | 5 ++++- pandas/core/internals/concat.py | 5 ++++- 5 files changed, 20 insertions(+), 5 deletions(-) diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index 802564d9a0707..70ef29c76f3d3 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -609,7 +609,9 @@ def maybe_promote(dtype: np.dtype, fill_value=np.nan): if fv.tz is None: return dtype, fv.asm8 - return np.dtype(object), fill_value + # pandas/core/dtypes/cast.py:612: error: Value of type variable "_DTypeScalar" + # of "dtype" cannot be "object" [type-var] + return np.dtype(object), fill_value # type: ignore[type-var] elif issubclass(dtype.type, np.timedelta64): if ( diff --git a/pandas/core/generic.py b/pandas/core/generic.py index a12bb4c21de5f..12f1b0fb89d6d 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -1928,7 +1928,11 @@ def __array_wrap__( def __array_ufunc__( self, ufunc: Callable, method: str, *inputs: Any, **kwargs: Any ): - return arraylike.array_ufunc(self, ufunc, method, *inputs, **kwargs) + # pandas/core/generic.py:1931: error: Argument 2 to "array_ufunc" has + # incompatible type "Callable[..., Any]"; expected "ufunc" [arg-type] + return arraylike.array_ufunc( + self, ufunc, method, *inputs, **kwargs # type: ignore[arg-type] + ) # ideally we would define this to avoid the getattr 
checks, but # is slower diff --git a/pandas/core/internals/array_manager.py b/pandas/core/internals/array_manager.py index 77d427e59be83..185bb25cc1037 100644 --- a/pandas/core/internals/array_manager.py +++ b/pandas/core/internals/array_manager.py @@ -711,7 +711,10 @@ def iset(self, loc: Union[int, slice, np.ndarray], value): else: assert isinstance(loc, np.ndarray) assert loc.dtype == "bool" - indices = np.nonzero(loc)[0] + # pandas/core/internals/array_manager.py:714: error: Incompatible types in + # assignment (expression has type "ndarray", variable has type "range") + # [assignment] + indices = np.nonzero(loc)[0] # type: ignore[assignment] assert value.ndim == 2 assert value.shape[0] == len(self._axes[0]) diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index e53c8df790eaf..adcdeb614ec33 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -964,7 +964,10 @@ def _replace_coerce( nb = self.coerce_to_target_dtype(value) if nb is self and not inplace: nb = nb.copy() - putmask_inplace(nb.values, mask, value) + # pandas/core/internals/blocks.py:967: error: Value of type variable + # "ArrayLike" of "putmask_inplace" cannot be "Union[ndarray, + # ExtensionArray]" [type-var] + putmask_inplace(nb.values, mask, value) # type: ignore[type-var] return [nb] else: regex = should_use_regex(regex, to_replace) diff --git a/pandas/core/internals/concat.py b/pandas/core/internals/concat.py index 3b7ac3e434ca3..01e2144d99ca7 100644 --- a/pandas/core/internals/concat.py +++ b/pandas/core/internals/concat.py @@ -309,7 +309,10 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike: elif is_extension_array_dtype(blk_dtype): pass elif is_extension_array_dtype(empty_dtype): - cls = empty_dtype.construct_array_type() + # pandas/core/internals/concat.py:312: error: Item "dtype[Any]" of + # "Union[dtype[Any], ExtensionDtype]" has no attribute + # "construct_array_type" [union-attr] + cls = empty_dtype.construct_array_type() # type: ignore[union-attr] missing_arr = cls._from_sequence([], dtype=empty_dtype) ncols, nrows = self.shape assert ncols == 1, ncols From 8736f6c61ad8bbf7dc7930d5e1a9a6ecfcaa35fc Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Tue, 16 Feb 2021 12:37:14 +0000 Subject: [PATCH 58/86] tidy comments (wip) --- pandas/core/algorithms.py | 143 ++++++++++++----------------- pandas/core/apply.py | 9 +- pandas/core/array_algos/putmask.py | 21 ++--- pandas/core/array_algos/replace.py | 5 +- pandas/core/arraylike.py | 3 +- pandas/core/arrays/_mixins.py | 19 ++-- pandas/core/arrays/_ranges.py | 14 ++- pandas/core/arrays/base.py | 34 +++---- pandas/core/arrays/boolean.py | 13 ++- pandas/core/arrays/categorical.py | 43 ++++----- pandas/core/arrays/datetimelike.py | 12 +-- pandas/core/arrays/floating.py | 12 +-- pandas/core/arrays/integer.py | 4 +- pandas/core/arrays/interval.py | 8 +- pandas/core/arrays/masked.py | 13 +-- pandas/core/base.py | 29 +++--- pandas/core/common.py | 8 +- pandas/core/construction.py | 61 +++++------- pandas/core/describe.py | 24 ++--- pandas/core/frame.py | 98 +++++++++----------- pandas/core/generic.py | 23 ++--- pandas/core/missing.py | 19 ++-- pandas/core/nanops.py | 78 ++++++---------- pandas/core/series.py | 26 +++--- 24 files changed, 302 insertions(+), 417 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index 8050357fbedb0..cfa98acf71f56 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -137,9 +137,9 @@ def _ensure_data( with 
catch_warnings(): simplefilter("ignore", np.ComplexWarning) values = ensure_float64(values) - # pandas/core/algorithms.py:134: error: Incompatible return value type (got - # "Tuple[ExtensionArray, dtype[floating[_64Bit]]]", expected "Tuple[ndarray, - # Union[dtype[Any], ExtensionDtype]]") [return-value] + # error: Incompatible return value type (got "Tuple[ExtensionArray, + # dtype[floating[_64Bit]]]", expected "Tuple[ndarray, Union[dtype[Any], + # ExtensionDtype]]") return values, np.dtype("float64") # type: ignore[return-value] except (TypeError, ValueError, OverflowError): @@ -156,9 +156,8 @@ def _ensure_data( elif is_timedelta64_dtype(values.dtype) or is_timedelta64_dtype(dtype): from pandas import TimedeltaIndex - # pandas/core/algorithms.py:158: error: Incompatible types in assignment - # (expression has type "TimedeltaArray", variable has type "ndarray") - # [assignment] + # error: Incompatible types in assignment (expression has type + # "TimedeltaArray", variable has type "ndarray") values = TimedeltaIndex(values)._data # type: ignore[assignment] else: # Datetime @@ -175,27 +174,23 @@ def _ensure_data( from pandas import DatetimeIndex - # pandas/core/algorithms.py:174: error: Incompatible types in assignment - # (expression has type "DatetimeArray", variable has type "ndarray") - # [assignment] + # Incompatible types in assignment (expression has type "DatetimeArray", + # variable has type "ndarray") values = DatetimeIndex(values)._data # type: ignore[assignment] dtype = values.dtype - # pandas/core/algorithms.py:176: error: Item "ndarray" of "Union[PeriodArray, - # Any, ndarray]" has no attribute "asi8" [union-attr] + # error: Item "ndarray" of "Union[PeriodArray, Any, ndarray]" has no attribute + # "asi8" return values.asi8, dtype # type: ignore[union-attr] elif is_categorical_dtype(values.dtype) and ( is_categorical_dtype(dtype) or dtype is None ): - # pandas/core/algorithms.py:179: error: Incompatible types in assignment - # (expression has type "Categorical", variable has type "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type "Categorical", + # variable has type "ndarray") values = cast("Categorical", values) # type: ignore[assignment] - # pandas/core/algorithms.py:180: error: Incompatible types in assignment - # (expression has type "ndarray", variable has type "ExtensionArray") - # [assignment] - - # pandas/core/algorithms.py:180: error: Item "ndarray" of "Union[Any, ndarray]" - # has no attribute "codes" [union-attr] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") + # error: Item "ndarray" of "Union[Any, ndarray]" has no attribute "codes" values = values.codes # type: ignore[assignment,union-attr] dtype = pandas_dtype("category") @@ -203,15 +198,15 @@ def _ensure_data( # until our algos support int* directly (not all do) values = ensure_int64(values) - # pandas/core/algorithms.py:187: error: Incompatible return value type (got - # "Tuple[ExtensionArray, Union[dtype[Any], ExtensionDtype]]", expected - # "Tuple[ndarray, Union[dtype[Any], ExtensionDtype]]") [return-value] + # error: Incompatible return value type (got "Tuple[ExtensionArray, + # Union[dtype[Any], ExtensionDtype]]", expected "Tuple[ndarray, + # Union[dtype[Any], ExtensionDtype]]") return values, dtype # type: ignore[return-value] # we have failed, return object - # pandas/core/algorithms.py:190: error: Incompatible types in assignment (expression - # has type "ndarray", variable has type "ExtensionArray") 
[assignment] + # error: Incompatible types in assignment (expression has type "ndarray", variable + # has type "ExtensionArray") values = np.asarray(values, dtype=object) # type: ignore[assignment] return ensure_object(values), np.dtype("object") @@ -237,8 +232,8 @@ def _reconstruct_data( return values if is_extension_array_dtype(dtype): - # pandas/core/algorithms.py:215: error: Item "dtype[Any]" of "Union[dtype[Any], - # ExtensionDtype]" has no attribute "construct_array_type" [union-attr] + # error: Item "dtype[Any]" of "Union[dtype[Any], ExtensionDtype]" has no + # attribute "construct_array_type" cls = dtype.construct_array_type() # type: ignore[union-attr] if isinstance(values, cls) and values.dtype == dtype: return values @@ -322,8 +317,8 @@ def _get_values_for_rank(values: ArrayLike): if is_categorical_dtype(values): values = cast("Categorical", values)._values_for_rank() - # pandas/core/algorithms.py:298: error: Incompatible types in assignment (expression - # has type "ndarray", variable has type "ExtensionArray") [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", variable + # has type "ExtensionArray") values, _ = _ensure_data(values) # type: ignore[assignment] return values @@ -492,39 +487,29 @@ def isin(comps: AnyArrayLike, values: AnyArrayLike) -> np.ndarray: elif isinstance(values, ABCMultiIndex): # Avoid raising in extract_array - # pandas/core/algorithms.py:466: error: Incompatible types in assignment - # (expression has type "ndarray", variable has type "ExtensionArray") - # [assignment] - - # pandas/core/algorithms.py:466: error: Incompatible types in assignment - # (expression has type "ndarray", variable has type "Index") [assignment] - - # pandas/core/algorithms.py:466: error: Incompatible types in assignment - # (expression has type "ndarray", variable has type "Series") [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "Index") + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "Series") values = np.array(values) # type: ignore[assignment] else: - # pandas/core/algorithms.py:468: error: Incompatible types in assignment - # (expression has type "Union[Any, ExtensionArray]", variable has type "Index") - # [assignment] - - # pandas/core/algorithms.py:468: error: Incompatible types in assignment - # (expression has type "Union[Any, ExtensionArray]", variable has type "Series") - # [assignment] + # error: Incompatible types in assignment (expression has type "Union[Any, + # ExtensionArray]", variable has type "Index") + # error: Incompatible types in assignment (expression has type "Union[Any, + # ExtensionArray]", variable has type "Series") values = extract_array(values, extract_numpy=True) # type: ignore[assignment] comps = _ensure_arraylike(comps) - # pandas/core/algorithms.py:506: error: Incompatible types in assignment (expression - # has type "Union[Any, ExtensionArray]", variable has type "Index") [assignment] - - # pandas/core/algorithms.py:506: error: Incompatible types in assignment (expression - # has type "Union[Any, ExtensionArray]", variable has type "Series") [assignment] + # error: Incompatible types in assignment (expression has type "Union[Any, + # ExtensionArray]", variable has type "Index") + # error: Incompatible types in assignment (expression has type "Union[Any, + # ExtensionArray]", 
variable has type "Series") comps = extract_array(comps, extract_numpy=True) # type: ignore[assignment] if is_extension_array_dtype(comps.dtype): - # pandas/core/algorithms.py:508: error: Incompatible return value type (got - # "Series", expected "ndarray") [return-value] - - # pandas/core/algorithms.py:508: error: Item "ndarray" of "Union[Any, ndarray]" - # has no attribute "isin" [union-attr] + # error: Incompatible return value type (got "Series", expected "ndarray") + # error: Item "ndarray" of "Union[Any, ndarray]" has no attribute "isin" return comps.isin(values) # type: ignore[return-value,union-attr] elif needs_i8_conversion(comps.dtype): @@ -556,20 +541,16 @@ def f(c, v): f = np.in1d else: - # pandas/core/algorithms.py:505: error: List item 0 has incompatible type - # "Union[Any, dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], None, - # type, _SupportsDType, str, Tuple[Any, Union[int, Sequence[int]]], List[Any], - # _DTypeDict, Tuple[Any, Any]]" [list-item] - - # pandas/core/algorithms.py:505: error: List item 1 has incompatible type - # "Union[Any, ExtensionDtype]"; expected "Union[dtype[Any], None, type, - # _SupportsDType, str, Tuple[Any, Union[int, Sequence[int]]], List[Any], - # _DTypeDict, Tuple[Any, Any]]" [list-item] - - # pandas/core/algorithms.py:505: error: List item 1 has incompatible type - # "Union[dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], None, type, - # _SupportsDType, str, Tuple[Any, Union[int, Sequence[int]]], List[Any], - # _DTypeDict, Tuple[Any, Any]]" [list-item] + # error: List item 0 has incompatible type "Union[Any, dtype[Any], + # ExtensionDtype]"; expected "Union[dtype[Any], None, type, _SupportsDType, str, + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]" + # error: List item 1 has incompatible type "Union[Any, ExtensionDtype]"; + # expected "Union[dtype[Any], None, type, _SupportsDType, str, Tuple[Any, + # Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]" + # error: List item 1 has incompatible type "Union[dtype[Any], ExtensionDtype]"; + # expected "Union[dtype[Any], None, type, _SupportsDType, str, Tuple[Any, + # Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]" common = np.find_common_type( [values.dtype, comps.dtype], [] # type: ignore[list-item] ) @@ -983,8 +964,8 @@ def duplicated(values: ArrayLike, keep: str = "first") -> np.ndarray: ------- duplicated : ndarray """ - # pandas/core/algorithms.py:917: error: Incompatible types in assignment (expression - # has type "ndarray", variable has type "ExtensionArray") [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", variable + # has type "ExtensionArray") values, _ = _ensure_data(values) # type: ignore[assignment] ndtype = values.dtype.name f = getattr(htable, f"duplicated_{ndtype}") @@ -1782,12 +1763,11 @@ def _take_preprocess_indexer_and_fill_value( if dtype != arr.dtype and (out is None or out.dtype != dtype): # check if promotion is actually required based on indexer mask = indexer == -1 - # pandas/core/algorithms.py:1785: error: Item "bool" of "Union[Any, - # bool]" has no attribute "any" [union-attr] + # error: Item "bool" of "Union[Any, bool]" has no attribute "any" needs_masking = mask.any() # type: ignore[union-attr] - # pandas/core/algorithms.py:1786: error: Incompatible types in - # assignment (expression has type "Tuple[Union[Any, bool], Any]", - # variable has type "Optional[Tuple[None, bool]]") [assignment] + # error: Incompatible types in assignment 
(expression has type + # "Tuple[Union[Any, bool], Any]", variable has type + # "Optional[Tuple[None, bool]]") mask_info = mask, needs_masking # type: ignore[assignment] if needs_masking: if out is not None and out.dtype != dtype: @@ -1905,8 +1885,8 @@ def take_2d_multi( row_idx = ensure_int64(row_idx) col_idx = ensure_int64(col_idx) - # pandas/core/algorithms.py:1903: error: Incompatible types in assignment - # (expression has type "Tuple[Any, Any]", variable has type "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type "Tuple[Any, Any]", + # variable has type "ndarray") indexer = row_idx, col_idx # type: ignore[assignment] mask_info = None @@ -2231,11 +2211,10 @@ def safe_sort( if not isinstance(values, (np.ndarray, ABCExtensionArray)): # don't convert to string types dtype, _ = infer_dtype_from_array(values) - # pandas/core/algorithms.py:2208: error: Argument "dtype" to "asarray" has - # incompatible type "Union[dtype[Any], ExtensionDtype]"; expected - # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], - # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, - # Any]]]" [arg-type] + # error: Argument "dtype" to "asarray" has incompatible type "Union[dtype[Any], + # ExtensionDtype]"; expected "Union[dtype[Any], None, type, _SupportsDType, str, + # Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], List[Any], + # _DTypeDict, Tuple[Any, Any]]]" values = np.asarray(values, dtype=dtype) # type: ignore[arg-type] sorter = None diff --git a/pandas/core/apply.py b/pandas/core/apply.py index 0093edc9c459b..16380d7279b6d 100644 --- a/pandas/core/apply.py +++ b/pandas/core/apply.py @@ -843,11 +843,10 @@ def apply_standard(self) -> FrameOrSeriesUnion: with np.errstate(all="ignore"): if isinstance(f, np.ufunc): - # pandas/core/apply.py:622: error: Argument 1 to "__call__" of "ufunc" - # has incompatible type "Series"; expected "Union[Union[int, float, - # complex, str, bytes, generic], Sequence[Union[int, float, complex, - # str, bytes, generic]], Sequence[Sequence[Any]], _SupportsArray]" - # [arg-type] + # error: Argument 1 to "__call__" of "ufunc" has incompatible type + # "Series"; expected "Union[Union[int, float, complex, str, bytes, + # generic], Sequence[Union[int, float, complex, str, bytes, generic]], + # Sequence[Sequence[Any]], _SupportsArray]" return f(obj) # type: ignore[arg-type] # row-wise access diff --git a/pandas/core/array_algos/putmask.py b/pandas/core/array_algos/putmask.py index 88ae5a9dd2774..0a463ff6ce8c1 100644 --- a/pandas/core/array_algos/putmask.py +++ b/pandas/core/array_algos/putmask.py @@ -113,11 +113,10 @@ def putmask_smart(values: np.ndarray, mask: np.ndarray, new) -> np.ndarray: return _putmask_preserve(values, new, mask) dtype = find_common_type([values.dtype, new.dtype]) - # pandas/core/array_algos/putmask.py:110: error: Argument 1 to "astype" of - # "_ArrayOrScalarCommon" has incompatible type "Union[dtype[Any], ExtensionDtype]"; - # expected "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, - # int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, - # Any]]]" [arg-type] + # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has incompatible type + # "Union[dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], None, type, + # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DTypeDict, Tuple[Any, Any]]]" values = values.astype(dtype) # type: ignore[arg-type] return 
_putmask_preserve(values, new, mask) @@ -186,16 +185,14 @@ def extract_bool_array(mask: ArrayLike) -> np.ndarray: # Except for BooleanArray, this is equivalent to just # np.asarray(mask, dtype=bool) - # pandas/core/array_algos/putmask.py:188: error: Incompatible types in - # assignment (expression has type "ndarray", variable has type "ExtensionArray") - # [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") mask = mask.to_numpy(dtype=bool, na_value=False) # type: ignore[assignment] - # pandas/core/array_algos/putmask.py:190: error: Incompatible types in assignment - # (expression has type "ndarray", variable has type "ExtensionArray") [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", variable + # has type "ExtensionArray") mask = np.asarray(mask, dtype=bool) # type: ignore[assignment] - # pandas/core/array_algos/putmask.py:191: error: Incompatible return value type (got - # "ExtensionArray", expected "ndarray") [return-value] + # error: Incompatible return value type (got "ExtensionArray", expected "ndarray") return mask # type: ignore[return-value] diff --git a/pandas/core/array_algos/replace.py b/pandas/core/array_algos/replace.py index 15fec393a20e5..4ed86b3d1f643 100644 --- a/pandas/core/array_algos/replace.py +++ b/pandas/core/array_algos/replace.py @@ -146,9 +146,8 @@ def re_replacer(s): f = np.vectorize(re_replacer, otypes=[values.dtype]) if mask is None: - # pandas\core\array_algos\replace.py:133: error: Invalid index type - # "slice" for "ExtensionArray"; expected type "Union[int, ndarray]" - # [index] + # error: Invalid index type "slice" for "ExtensionArray"; expected type + # "Union[int, ndarray]" values[:] = f(values) # type: ignore[index] else: values[mask] = f(values[mask]) diff --git a/pandas/core/arraylike.py b/pandas/core/arraylike.py index 327770aabbce4..7ea500347d42f 100644 --- a/pandas/core/arraylike.py +++ b/pandas/core/arraylike.py @@ -253,8 +253,7 @@ def array_ufunc(self, ufunc: np.ufunc, method: str, *inputs: Any, **kwargs: Any) # Determine if we should defer. 
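Illustrative sketch, not part of the applied diff (the variable and values below are made up): the hunks above all rewrite suppression comments into the same pattern, where the mypy message is quoted without its file:line prefix and the ignore names the specific error code instead of being a bare "type: ignore". A minimal reproduction of that pattern:

import numpy as np

values: np.ndarray = np.arange(3)

# error: Incompatible types in assignment (expression has type "List[int]",
# variable has type "ndarray")
values = [0, 1, 2]  # type: ignore[assignment]

With --show-error-codes enabled, mypy prints the bracketed code at the end of each message, so the scoped ignore silences only that error and still reports anything else flagged on the same line.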
- # pandas/core/arraylike.py:171: error: "Type[ndarray]" has no attribute - # "__array_ufunc__" [attr-defined] + # error: "Type[ndarray]" has no attribute "__array_ufunc__" no_defer = ( np.ndarray.__array_ufunc__, # type: ignore[attr-defined] cls.__array_ufunc__, diff --git a/pandas/core/arrays/_mixins.py b/pandas/core/arrays/_mixins.py index 2889ab189d4e0..d5ad8c3beda5d 100644 --- a/pandas/core/arrays/_mixins.py +++ b/pandas/core/arrays/_mixins.py @@ -205,9 +205,8 @@ def _concat_same_type( new_values = [x._ndarray for x in to_concat] new_values = np.concatenate(new_values, axis=axis) - # pandas\core\arrays\_mixins.py:187: error: Argument 1 to - # "_from_backing_data" of "NDArrayBackedExtensionArray" has - # incompatible type "List[ndarray]"; expected "ndarray" [arg-type] + # error: Argument 1 to "_from_backing_data" of "NDArrayBackedExtensionArray" has + # incompatible type "List[ndarray]"; expected "ndarray" return to_concat[0]._from_backing_data(new_values) # type: ignore[arg-type] @doc(ExtensionArray.searchsorted) @@ -249,13 +248,10 @@ def __getitem__( return self._box_func(result) return self._from_backing_data(result) - # pandas\core\arrays\_mixins.py:228: error: Value of type variable - # "AnyArrayLike" of "extract_array" cannot be "Union[int, slice, - # ndarray]" [type-var] - - # pandas\core\arrays\_mixins.py:228: error: Incompatible types in - # assignment (expression has type "ExtensionArray", variable has type - # "Union[int, slice, ndarray]") [assignment] + # error: Value of type variable "AnyArrayLike" of "extract_array" cannot be + # "Union[int, slice, ndarray]" + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "Union[int, slice, ndarray]") key = extract_array( # type: ignore[type-var,assignment] key, extract_numpy=True ) @@ -406,8 +402,7 @@ def value_counts(self, dropna: bool = True): from pandas import Index, Series if dropna: - # pandas/core/arrays/_mixins.py:409: error: Unsupported operand type for ~ - # ("ExtensionArray") [operator] + # error: Unsupported operand type for ~ ("ExtensionArray") values = self[~self.isna()]._ndarray # type: ignore[operator] else: values = self._ndarray diff --git a/pandas/core/arrays/_ranges.py b/pandas/core/arrays/_ranges.py index 8cc27ee041f40..34d5ea6cfb20d 100644 --- a/pandas/core/arrays/_ranges.py +++ b/pandas/core/arrays/_ranges.py @@ -161,8 +161,8 @@ def _generate_range_overflow_safe_signed( # Putting this into a DatetimeArray/TimedeltaArray # would incorrectly be interpreted as NaT raise OverflowError - # pandas/core/arrays/_ranges.py:164: error: Incompatible return value type - # (got "signedinteger[_64Bit]", expected "int") [return-value] + # error: Incompatible return value type (got "signedinteger[_64Bit]", + # expected "int") return result # type: ignore[return-value] except (FloatingPointError, OverflowError): # with endpoint negative and addend positive we risk @@ -178,16 +178,14 @@ def _generate_range_overflow_safe_signed( # exceed implementation bounds, but when passing the result to # np.arange will get a result slightly within the bounds - # pandas/core/arrays/_ranges.py:178: error: Incompatible types in assignment - # (expression has type "unsignedinteger[_64Bit]", variable has type - # "signedinteger[_64Bit]") [assignment] + # error: Incompatible types in assignment (expression has type + # "unsignedinteger[_64Bit]", variable has type "signedinteger[_64Bit]") result = np.uint64(endpoint) + np.uint64(addend) # type: ignore[assignment] i64max = 
np.uint64(np.iinfo(np.int64).max) assert result > i64max if result <= i64max + np.uint64(stride): - # pandas\core\arrays\_ranges.py:171: error: Incompatible return - # value type (got "unsignedinteger", expected "int") - # [return-value] + # error: Incompatible return value type (got "unsignedinteger", expected + # "int") return result # type: ignore[return-value] raise OutOfBoundsDatetime( diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py index a78d076c26ba9..904c26ae237c3 100644 --- a/pandas/core/arrays/base.py +++ b/pandas/core/arrays/base.py @@ -369,16 +369,14 @@ def __contains__(self, item) -> bool: if not self._can_hold_na: return False elif item is self.dtype.na_value or isinstance(item, self.dtype.type): - # pandas/core/arrays/base.py:369: error: "ExtensionArray" has no - # attribute "any" [attr-defined] + # error: "ExtensionArray" has no attribute "any" return self.isna().any() # type: ignore[attr-defined] else: return False else: return (item == self).any() - # pandas/core/arrays/base.py:375: error: Signature of "__eq__" incompatible with - # supertype "object" [override] + # error: Signature of "__eq__" incompatible with supertype "object" def __eq__(self, other: Any) -> ArrayLike: # type: ignore[override] """ Return for `self == other` (element-wise equality). @@ -429,12 +427,11 @@ def to_numpy( ------- numpy.ndarray """ - # pandas/core/arrays/base.py:431: error: Argument "dtype" to "asarray" has - # incompatible type "Union[ExtensionDtype, str, dtype[Any], Type[str], - # Type[float], Type[int], Type[complex], Type[bool], Type[object], None]"; - # expected "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, - # int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, - # Any]]]" [arg-type] + # error: Argument "dtype" to "asarray" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]"; expected "Union[dtype[Any], + # None, type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" result = np.asarray(self, dtype=dtype) # type: ignore[arg-type] if copy or na_value is not lib.no_default: result = result.copy() @@ -629,8 +626,7 @@ def argmin(self, skipna: bool = True) -> int: ExtensionArray.argmax """ validate_bool_kwarg(skipna, "skipna") - # pandas/core/arrays/base.py:632: error: "ExtensionArray" has no attribute "any" - # [attr-defined] + # error: "ExtensionArray" has no attribute "any" if not skipna and self.isna().any(): # type: ignore[attr-defined] raise NotImplementedError return nargminmax(self, "argmin") @@ -655,8 +651,7 @@ def argmax(self, skipna: bool = True) -> int: ExtensionArray.argmin """ validate_bool_kwarg(skipna, "skipna") - # pandas/core/arrays/base.py:656: error: "ExtensionArray" has no attribute "any" - # [attr-defined] + # error: "ExtensionArray" has no attribute "any" if not skipna and self.isna().any(): # type: ignore[attr-defined] raise NotImplementedError return nargminmax(self, "argmax") @@ -700,8 +695,7 @@ def fillna(self, value=None, method=None, limit=None): ) value = value[mask] - # pandas\core\arrays\base.py:620: error: "ExtensionArray" has no - # attribute "any" [attr-defined] + # error: "ExtensionArray" has no attribute "any" if mask.any(): # type: ignore[attr-defined] if method is not None: func = get_fill_func(method) @@ -723,8 +717,7 @@ def dropna(self): ------- valid : ExtensionArray """ - # 
pandas\core\arrays\base.py:641: error: Unsupported operand type for ~ - # ("ExtensionArray") [operator] + # error: Unsupported operand type for ~ ("ExtensionArray") return self[~self.isna()] # type: ignore[operator] def shift(self, periods: int = 1, fill_value: object = None) -> ExtensionArray: @@ -1144,9 +1137,8 @@ def view(self, dtype: Optional[Dtype] = None) -> ArrayLike: # giving a view with the same dtype as self. if dtype is not None: raise NotImplementedError(dtype) - # pandas\core\arrays\base.py:1075: error: Incompatible return value - # type (got "Union[ExtensionArray, Any]", expected "ndarray") - # [return-value] + # error: Incompatible return value type (got "Union[ExtensionArray, Any]", + # expected "ndarray") return self[:] # type: ignore[return-value] # ------------------------------------------------------------------------ diff --git a/pandas/core/arrays/boolean.py b/pandas/core/arrays/boolean.py index c358014986fed..59f6dab22a4b4 100644 --- a/pandas/core/arrays/boolean.py +++ b/pandas/core/arrays/boolean.py @@ -386,8 +386,8 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: dtype = pandas_dtype(dtype) if isinstance(dtype, ExtensionDtype): - # pandas/core/arrays/boolean.py:377: error: Incompatible return value type - # (got "ExtensionArray", expected "ndarray") [return-value] + # error: Incompatible return value type (got "ExtensionArray", expected + # "ndarray") return super().astype(dtype, copy) # type: ignore[return-value] if is_bool_dtype(dtype): @@ -395,8 +395,8 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: if self._hasna: raise ValueError("cannot convert float NaN to bool") else: - # pandas/core/arrays/boolean.py:384: error: Incompatible return value - # type (got "ndarray", expected "ExtensionArray") [return-value] + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") return self._data.astype(dtype, copy=copy) # type: ignore[return-value] # for integer, error if there are missing values @@ -602,9 +602,8 @@ def _logical_method(self, other, op): elif op.__name__ in {"xor", "rxor"}: result, mask = ops.kleene_xor(self._data, other, self._mask, mask) - # pandas\core\arrays\boolean.py:610: error: Argument 2 to - # "BooleanArray" has incompatible type "Optional[Any]"; expected - # "ndarray" [arg-type] + # error: Argument 2 to "BooleanArray" has incompatible type "Optional[Any]"; + # expected "ndarray" return BooleanArray(result, mask) # type: ignore[arg-type] def _cmp_method(self, other, op): diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py index fdc9f8d50dcd8..6a9fc97a266b6 100644 --- a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -371,9 +371,8 @@ def __init__( values = sanitize_array(values, None, dtype=sanitize_dtype) else: - # pandas/core/arrays/categorical.py:372: error: Argument 1 to - # "sanitize_to_nanoseconds" has incompatible type "Union[ndarray, - # ExtensionArray]"; expected "ndarray" [arg-type] + # error: Argument 1 to "sanitize_to_nanoseconds" has incompatible type + # "Union[ndarray, ExtensionArray]"; expected "ndarray" values = sanitize_to_nanoseconds(values) # type: ignore[arg-type] if dtype.categories is None: @@ -400,8 +399,8 @@ def __init__( dtype = CategoricalDtype(categories, dtype.ordered) elif is_categorical_dtype(values.dtype): - # pandas/core/arrays/categorical.py:398: error: Item "ExtensionArray" of - # "Union[Any, ExtensionArray]" has no attribute "_codes" [union-attr] + # error: Item "ExtensionArray" of "Union[Any, 
ExtensionArray]" has no + # attribute "_codes" old_codes = extract_array(values)._codes # type: ignore[union-attr] codes = recode_for_categories( old_codes, values.dtype.categories, dtype.categories, copy=copy @@ -466,17 +465,16 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: raise ValueError("Cannot convert float NaN to integer") elif len(self.codes) == 0 or len(self.categories) == 0: - # pandas/core/arrays/categorical.py:425: error: Incompatible types in - # assignment (expression has type "ndarray", variable has type - # "Categorical") [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "Categorical") result = np.array( # type: ignore[assignment] self, - # pandas/core/arrays/categorical.py:425: error: Argument "dtype" to - # "array" has incompatible type "Union[ExtensionDtype, str, dtype[Any], - # Type[str], Type[float], Type[int], Type[complex], Type[bool], - # Type[object]]"; expected "Union[dtype[Any], None, type, - # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, - # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + # error: Argument "dtype" to "array" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object]]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, + # int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, + # Tuple[Any, Any]]]" dtype=dtype, # type: ignore[arg-type] copy=copy, ) @@ -485,12 +483,11 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: # GH8628 (PERF): astype category codes instead of astyping array try: new_cats = np.asarray(self.categories) - # pandas/core/arrays/categorical.py:488: error: Argument "dtype" to - # "astype" of "_ArrayOrScalarCommon" has incompatible type - # "Union[ExtensionDtype, dtype[Any]]"; expected "Union[dtype[Any], None, - # type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, - # Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" - # [arg-type] + # error: Argument "dtype" to "astype" of "_ArrayOrScalarCommon" has + # incompatible type "Union[ExtensionDtype, dtype[Any]]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, + # int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, + # Tuple[Any, Any]]]" new_cats = new_cats.astype( dtype=dtype, copy=copy # type: ignore[arg-type] ) @@ -503,8 +500,7 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: result = take_nd(new_cats, libalgos.ensure_platform_int(self._codes)) - # pandas/core/arrays/categorical.py:441: error: Incompatible return value type - # (got "Categorical", expected "ndarray") [return-value] + # error: Incompatible return value type (got "Categorical", expected "ndarray") return result # type: ignore[return-value] @cache_readonly @@ -2591,8 +2587,7 @@ def _get_codes_for_values(values, categories: Index) -> np.ndarray: # Only hit here when we've already coerced to object dtypee. 
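Illustrative sketch, not part of the applied diff (the array contents are made up): the BooleanArray._logical_method hunk a little above passes the result/mask pair from ops.kleene_xor straight into the BooleanArray constructor; the three-valued behaviour it implements is easiest to see from the public API:

import pandas as pd

a = pd.array([True, False, pd.NA], dtype="boolean")
b = pd.array([True, True, True], dtype="boolean")

# Kleene XOR: the result is NA whenever either operand is NA,
# otherwise it is the ordinary boolean xor.
print(a ^ b)  # -> [False, True, <NA>]

The ignored arg-type error comes from the mask: mypy infers it as Optional, while the BooleanArray constructor is annotated to take a plain ndarray.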
hash_klass, vals = get_data_algo(values) - # pandas/core/arrays/categorical.py:2579: error: Value of type variable "ArrayLike" - # of "get_data_algo" cannot be "Index" [type-var] + # error: Value of type variable "ArrayLike" of "get_data_algo" cannot be "Index" _, cats = get_data_algo(categories) # type: ignore[type-var] t = hash_klass(len(cats)) t.map_locations(cats) diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py index 05bd4e0e7b48c..c6e9dc00fac89 100644 --- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -762,9 +762,8 @@ def isin(self, values) -> np.ndarray: # ------------------------------------------------------------------ # Null Handling - # pandas/core/arrays/datetimelike.py:766: error: Return type "ndarray" of "isna" - # incompatible with return type "ArrayLike" in supertype "ExtensionArray" - # [override] + # error: Return type "ndarray" of "isna" incompatible with return type "ArrayLike" + # in supertype "ExtensionArray" def isna(self) -> np.ndarray: # type: ignore[override] return self._isnan @@ -780,8 +779,7 @@ def _hasnans(self) -> np.ndarray: """ return if I have any nans; enables various perf speedups """ - # pandas/core/arrays/datetimelike.py:781: error: Incompatible return value type - # (got "bool", expected "ndarray") [return-value] + # error: Incompatible return value type (got "bool", expected "ndarray") return bool(self._isnan.any()) # type: ignore[return-value] def _maybe_mask_results( @@ -1126,8 +1124,8 @@ def _addsub_object_array(self, other: np.ndarray, op): res_values = op(self.astype("O"), np.asarray(other)) result = array(res_values.ravel()) - # pandas/core/arrays/datetimelike.py:1122: error: Item "ExtensionArray" of - # "Union[Any, ExtensionArray]" has no attribute "reshape" [union-attr] + # error: Item "ExtensionArray" of "Union[Any, ExtensionArray]" has no attribute + # "reshape" result = extract_array(result, extract_numpy=True).reshape( # type: ignore[union-attr] # noqa self.shape ) diff --git a/pandas/core/arrays/floating.py b/pandas/core/arrays/floating.py index 215280c84c9c7..43c34a5548d41 100644 --- a/pandas/core/arrays/floating.py +++ b/pandas/core/arrays/floating.py @@ -285,8 +285,8 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: dtype = pandas_dtype(dtype) if isinstance(dtype, ExtensionDtype): - # pandas/core/arrays/floating.py:337: error: Incompatible return value type - # (got "ExtensionArray", expected "ndarray") [return-value] + # error: Incompatible return value type (got "ExtensionArray", expected + # "ndarray") return super().astype(dtype, copy=copy) # type: ignore[return-value] # coerce @@ -294,8 +294,8 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: # In astype, we consider dtype=float to also mean na_value=np.nan kwargs = {"na_value": np.nan} elif is_datetime64_dtype(dtype): - # pandas/core/arrays/floating.py:397: error: Dict entry 0 has incompatible - # type "str": "datetime64"; expected "str": "float" [dict-item] + # error: Dict entry 0 has incompatible type "str": "datetime64"; expected + # "str": "float" kwargs = {"na_value": np.datetime64("NaT")} # type: ignore[dict-item] else: kwargs = {} @@ -303,8 +303,8 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: # error: Argument 2 to "to_numpy" of "BaseMaskedArray" has incompatible # type "**Dict[str, float]"; expected "bool" data = self.to_numpy(dtype=dtype, **kwargs) # type: ignore[arg-type] - # pandas\core\arrays\floating.py:405: error: Incompatible return value - # type (got 
"ExtensionArray", expected "ndarray") [return-value] + # error: Incompatible return value type (got "ExtensionArray", expected + # "ndarray") return astype_nansafe(data, dtype, copy=False) # type: ignore[return-value] def _values_for_argsort(self) -> np.ndarray: diff --git a/pandas/core/arrays/integer.py b/pandas/core/arrays/integer.py index 31937ccc1274e..156ae8f673419 100644 --- a/pandas/core/arrays/integer.py +++ b/pandas/core/arrays/integer.py @@ -355,8 +355,8 @@ def astype(self, dtype, copy: bool = True) -> ArrayLike: dtype = pandas_dtype(dtype) if isinstance(dtype, ExtensionDtype): - # pandas/core/arrays/integer.py:406: error: Incompatible return value type - # (got "ExtensionArray", expected "ndarray") [return-value] + # error: Incompatible return value type (got "ExtensionArray", expected + # "ndarray") return super().astype(dtype, copy=copy) # type: ignore[return-value] # coerce diff --git a/pandas/core/arrays/interval.py b/pandas/core/arrays/interval.py index 1c7841057bb3f..e51b1e987bbcc 100644 --- a/pandas/core/arrays/interval.py +++ b/pandas/core/arrays/interval.py @@ -598,10 +598,10 @@ def __getitem__(self, key): if is_scalar(left) and isna(left): return self._fill_value return Interval(left, right, self.closed) - # pandas/core/arrays/interval.py:567: error: Argument 1 to "ndim" has - # incompatible type "Union[ndarray, ExtensionArray]"; expected "Union[Union[int, - # float, complex, str, bytes, generic], Sequence[Union[int, float, complex, str, - # bytes, generic]], Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + # error: Argument 1 to "ndim" has incompatible type "Union[ndarray, + # ExtensionArray]"; expected "Union[Union[int, float, complex, str, bytes, + # generic], Sequence[Union[int, float, complex, str, bytes, generic]], + # Sequence[Sequence[Any]], _SupportsArray]" if np.ndim(left) > 1: # type: ignore[arg-type] # GH#30588 multi-dimensional indexer disallowed raise ValueError("multi-dimensional indexing not allowed") diff --git a/pandas/core/arrays/masked.py b/pandas/core/arrays/masked.py index a1b6ad4063f21..5ee729182499b 100644 --- a/pandas/core/arrays/masked.py +++ b/pandas/core/arrays/masked.py @@ -147,16 +147,9 @@ def __len__(self) -> int: def __invert__(self: BaseMaskedArrayT) -> BaseMaskedArrayT: return type(self)(~self._data, self._mask) - # pandas/core/arrays/masked.py:149: error: Argument 1 of "to_numpy" is incompatible - # with supertype "ExtensionArray"; supertype defines the argument type as - # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], - # Type[complex], Type[bool], Type[object], None]" [override] - - # pandas/core/arrays/masked.py:149: note: This violates the Liskov substitution - # principle - - # pandas/core/arrays/masked.py:149: note: See - # https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides + # error: Argument 1 of "to_numpy" is incompatible with supertype "ExtensionArray"; + # supertype defines the argument type as "Union[ExtensionDtype, str, dtype[Any], + # Type[str], Type[float], Type[int], Type[complex], Type[bool], Type[object], None]" def to_numpy( # type: ignore[override] self, dtype: Optional[NpDtype] = None, diff --git a/pandas/core/base.py b/pandas/core/base.py index a2ed09c67d046..33af5b615c50f 100644 --- a/pandas/core/base.py +++ b/pandas/core/base.py @@ -598,12 +598,11 @@ def to_numpy( f"to_numpy() got an unexpected keyword argument '{bad_keys}'" ) - # pandas/core/base.py:616: error: Argument "dtype" to "asarray" has incompatible - # type "Union[ExtensionDtype, str, 
dtype[Any], Type[str], Type[float], - # Type[int], Type[complex], Type[bool], Type[object], None]"; expected - # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], - # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, - # Any]]]" [arg-type] + # error: Argument "dtype" to "asarray" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]"; expected "Union[dtype[Any], + # None, type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" result = np.asarray(self._values, dtype=dtype) # type: ignore[arg-type] # TODO(GH-24345): Avoid potential double copy if copy or na_value is not lib.no_default: @@ -717,15 +716,14 @@ def argmax(self, axis=None, skipna: bool = True, *args, **kwargs) -> int: skipna = nv.validate_argmax_with_skipna(skipna, args, kwargs) if isinstance(delegate, ExtensionArray): - # pandas/core/base.py:723: error: "ExtensionArray" has no attribute "any" - # [attr-defined] + # error: "ExtensionArray" has no attribute "any" if not skipna and delegate.isna().any(): # type: ignore[attr-defined] return -1 else: return delegate.argmax() else: - # pandas/core/base.py:728: error: Incompatible return value type (got - # "Union[int, ndarray]", expected "int") [return-value] + # error: Incompatible return value type (got "Union[int, ndarray]", expected + # "int") return nanops.nanargmax( # type: ignore[return-value] delegate, skipna=skipna ) @@ -781,15 +779,14 @@ def argmin(self, axis=None, skipna=True, *args, **kwargs) -> int: skipna = nv.validate_argmin_with_skipna(skipna, args, kwargs) if isinstance(delegate, ExtensionArray): - # pandas/core/base.py:781: error: "ExtensionArray" has no attribute "any" - # [attr-defined] + # error: "ExtensionArray" has no attribute "any" if not skipna and delegate.isna().any(): # type: ignore[attr-defined] return -1 else: return delegate.argmin() else: - # pandas/core/base.py:786: error: Incompatible return value type (got - # "Union[int, ndarray]", expected "int") [return-value] + # error: Incompatible return value type (got "Union[int, ndarray]", expected + # "int") return nanops.nanargmin( # type: ignore[return-value] delegate, skipna=skipna ) @@ -1317,6 +1314,6 @@ def drop_duplicates(self, keep="first"): return self[~duplicated] # type: ignore[index] def duplicated(self, keep="first"): - # pandas/core/base.py:1316: error: Value of type variable "ArrayLike" of - # "duplicated" cannot be "Union[ExtensionArray, ndarray]" [type-var] + # error: Value of type variable "ArrayLike" of "duplicated" cannot be + # "Union[ExtensionArray, ndarray]" return duplicated(self._values, keep=keep) # type: ignore[type-var] diff --git a/pandas/core/common.py b/pandas/core/common.py index 8cda7d17e71b5..42c8c0dcdeb28 100644 --- a/pandas/core/common.py +++ b/pandas/core/common.py @@ -211,12 +211,12 @@ def asarray_tuplesafe(values, dtype: Optional[NpDtype] = None) -> np.ndarray: if not (isinstance(values, (list, tuple)) or hasattr(values, "__array__")): values = list(values) elif isinstance(values, ABCIndex): - # pandas/core/common.py:203: error: Incompatible return value type (got - # "Union[ExtensionArray, ndarray]", expected "ndarray") [return-value] + # error: Incompatible return value type (got "Union[ExtensionArray, ndarray]", + # expected "ndarray") return values._values # type: ignore[return-value] - # pandas/core/common.py:205: error: Non-overlapping 
container check (element type: - # "Union[str, dtype[Any], None]", container item type: "type") [comparison-overlap] + # error: Non-overlapping container check (element type: "Union[str, dtype[Any], + # None]", container item type: "type") if isinstance(values, list) and dtype in [ # type: ignore[comparison-overlap] np.object_, object, diff --git a/pandas/core/construction.py b/pandas/core/construction.py index 5bc7e19ff623b..2d9b5aeca5a61 100644 --- a/pandas/core/construction.py +++ b/pandas/core/construction.py @@ -285,21 +285,14 @@ def array( # error: Value of type variable "AnyArrayLike" of "extract_array" cannot be # "Union[Sequence[object], ExtensionArray]" - # error: Value of type variable "AnyArrayLike" of "extract_array" cannot be # "Union[Sequence[object], Index]" - - # pandas\core\construction.py:295: error: Incompatible types in assignment - # (expression has type "ExtensionArray", variable has type - # "Union[Sequence[object], Index]") [assignment] - - # pandas\core\construction.py:295: error: Incompatible types in assignment - # (expression has type "ExtensionArray", variable has type - # "Union[Sequence[object], Series]") [assignment] - - # pandas\core\construction.py:295: error: Incompatible types in assignment - # (expression has type "ExtensionArray", variable has type - # "Union[Sequence[object], ndarray]") [assignment] + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "Union[Sequence[object], Index]") + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "Union[Sequence[object], Series]") + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "Union[Sequence[object], ndarray]") data = extract_array(data, extract_numpy=True) # type: ignore[type-var,assignment] # this returns None for not-found dtypes. @@ -551,27 +544,21 @@ def _sanitize_ndim( if is_object_dtype(dtype) and isinstance(dtype, ExtensionDtype): # i.e. 
PandasDtype("O") - # pandas/core/construction.py:553: error: Incompatible types in assignment - # (expression has type "ndarray", variable has type "ExtensionArray") - # [assignment] - - # pandas/core/construction.py:553: error: Argument "dtype" to - # "asarray_tuplesafe" has incompatible type "Type[object]"; expected - # "Union[str, dtype[Any], None]" [arg-type] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") + # error: Argument "dtype" to "asarray_tuplesafe" has incompatible type + # "Type[object]"; expected "Union[str, dtype[Any], None]" result = com.asarray_tuplesafe( # type: ignore[assignment] data, dtype=object # type: ignore[arg-type] ) cls = dtype.construct_array_type() result = cls._from_sequence(result, dtype=dtype) else: - # pandas/core/construction.py:553: error: Incompatible types in assignment - # (expression has type "ndarray", variable has type "ExtensionArray") - # [assignment] - - # pandas/core/construction.py:553: error: Argument "dtype" to - # "asarray_tuplesafe" has incompatible type "Union[dtype[Any], - # ExtensionDtype, None]"; expected "Union[str, dtype[Any], None]" - # [arg-type] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") + # error: Argument "dtype" to "asarray_tuplesafe" has incompatible type + # "Union[dtype[Any], ExtensionDtype, None]"; expected "Union[str, + # dtype[Any], None]" result = com.asarray_tuplesafe( # type: ignore[assignment] data, dtype=dtype # type: ignore[arg-type] ) @@ -593,11 +580,10 @@ def _sanitize_str_dtypes( # GH#19853: If data is a scalar, result has already the result if not lib.is_scalar(data): if not np.all(isna(data)): - # pandas/core/construction.py:572: error: Argument "dtype" to "array" - # has incompatible type "Union[dtype[Any], ExtensionDtype, None]"; - # expected "Union[dtype[Any], None, type, _SupportsDType, str, - # Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], - # List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + # error: Argument "dtype" to "array" has incompatible type + # "Union[dtype[Any], ExtensionDtype, None]"; expected "Union[dtype[Any], + # None, type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, + # Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" data = np.array(data, dtype=dtype, copy=False) # type: ignore[arg-type] result = np.array(data, dtype=object, copy=copy) return result @@ -656,11 +642,10 @@ def _try_cast(arr, dtype: Optional[DtypeObj], copy: bool, raise_cast_failure: bo if is_integer_dtype(dtype): # this will raise if we have e.g. 
floats - # pandas\core\construction.py:595: error: Argument 2 to - # "maybe_cast_to_integer_array" has incompatible type "Union[dtype, - # ExtensionDtype, None]"; expected "Union[ExtensionDtype, str, - # dtype, Type[str], Type[float], Type[int], Type[complex], - # Type[bool], Type[object]]" [arg-type] + # error: Argument 2 to "maybe_cast_to_integer_array" has incompatible type + # "Union[dtype, ExtensionDtype, None]"; expected "Union[ExtensionDtype, str, + # dtype, Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]" maybe_cast_to_integer_array(arr, dtype) # type: ignore[arg-type] subarr = arr else: diff --git a/pandas/core/describe.py b/pandas/core/describe.py index adfe4ccab7e5b..b7bc34d2a0dcf 100644 --- a/pandas/core/describe.py +++ b/pandas/core/describe.py @@ -174,8 +174,8 @@ def _select_data(self): # when some numerics are found, keep only numerics default_include = [np.number] if self.datetime_is_numeric: - # pandas/core/describe.py:177: error: Argument 1 to "append" of "list" - # has incompatible type "str"; expected "Type[number[Any]]" [arg-type] + # error: Argument 1 to "append" of "list" has incompatible type "str"; + # expected "Type[number[Any]]" default_include.append("datetime") # type: ignore[arg-type] data = self.obj.select_dtypes(include=default_include) if len(data.columns) == 0: @@ -216,9 +216,9 @@ def describe_numeric_1d(series: Series, percentiles: Sequence[float]) -> Series: """ from pandas import Series - # pandas/core/describe.py:217: error: Argument 1 to "format_percentiles" has - # incompatible type "Sequence[float]"; expected "Union[ndarray, List[Union[int, - # float]], List[float], List[Union[str, float]]]" [arg-type] + # error: Argument 1 to "format_percentiles" has incompatible type "Sequence[float]"; + # expected "Union[ndarray, List[Union[int, float]], List[float], List[Union[str, + # float]]]" formatted_percentiles = format_percentiles(percentiles) # type: ignore[arg-type] stat_index = ["count", "mean", "std", "min"] + formatted_percentiles + ["max"] @@ -323,9 +323,9 @@ def describe_timestamp_1d(data: Series, percentiles: Sequence[float]) -> Series: # GH-30164 from pandas import Series - # pandas/core/describe.py:321: error: Argument 1 to "format_percentiles" has - # incompatible type "Sequence[float]"; expected "Union[ndarray, List[Union[int, - # float]], List[float], List[Union[str, float]]]" [arg-type] + # error: Argument 1 to "format_percentiles" has incompatible type "Sequence[float]"; + # expected "Union[ndarray, List[Union[int, float]], List[float], List[Union[str, + # float]]]" formatted_percentiles = format_percentiles(percentiles) # type: ignore[arg-type] stat_index = ["count", "mean", "min"] + formatted_percentiles + ["max"] @@ -382,8 +382,8 @@ def refine_percentiles(percentiles: Optional[Sequence[float]]) -> Sequence[float The percentiles to include in the output. 
""" if percentiles is None: - # pandas/core/describe.py:377: error: Incompatible return value type (got - # "ndarray", expected "Sequence[float]") [return-value] + # error: Incompatible return value type (got "ndarray", expected + # "Sequence[float]") return np.array([0.25, 0.5, 0.75]) # type: ignore[return-value] # explicit conversion of `percentiles` to list @@ -396,8 +396,8 @@ def refine_percentiles(percentiles: Optional[Sequence[float]]) -> Sequence[float if 0.5 not in percentiles: percentiles.append(0.5) - # pandas/core/describe.py:389: error: Incompatible types in assignment (expression - # has type "ndarray", variable has type "Optional[Sequence[float]]") [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", variable + # has type "Optional[Sequence[float]]") percentiles = np.asarray(percentiles) # type: ignore[assignment] # sort and check for duplicates diff --git a/pandas/core/frame.py b/pandas/core/frame.py index e74fa6a8fe2e1..396948f73d985 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -549,10 +549,9 @@ def __init__( # a masked array else: data = sanitize_masked_array(data) - # pandas/core/frame.py:558: error: Argument "dtype" to "init_ndarray" - # has incompatible type "Union[ExtensionDtype, str, dtype[Any], - # Type[object], None]"; expected "Union[dtype[Any], ExtensionDtype, - # None]" [arg-type] + # error: Argument "dtype" to "init_ndarray" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" mgr = init_ndarray( data, index, @@ -574,14 +573,11 @@ def __init__( data, index, columns, dtype=dtype # type: ignore[arg-type] ) elif getattr(data, "name", None) is not None: - # pandas\core\frame.py:510: error: Item "ndarray" of - # "Union[ndarray, Series, Index]" has no attribute "name" - # [union-attr] - - # pandas\core\frame.py:510: error: Argument "dtype" to - # "init_dict" has incompatible type "Union[ExtensionDtype, str, - # dtype, Type[object], None]"; expected "Union[dtype, - # ExtensionDtype, None]" [arg-type] + # error: Item "ndarray" of "Union[ndarray, Series, Index]" has no + # attribute "name" + # error: Argument "dtype" to "init_dict" has incompatible type + # "Union[ExtensionDtype, str, dtype, Type[object], None]"; expected + # "Union[dtype, ExtensionDtype, None]" mgr = init_dict( {data.name: data}, # type: ignore[union-attr] index, @@ -609,27 +605,21 @@ def __init__( data = dataclasses_to_dicts(data) if treat_as_nested(data): arrays, columns, index = nested_data_to_arrays( - # pandas/core/frame.py:608: error: Argument 2 to - # "nested_data_to_arrays" has incompatible type - # "Optional[Collection[Any]]"; expected "Optional[Index]" - # [arg-type] - # pandas/core/frame.py:608: error: Argument 3 to - # "nested_data_to_arrays" has incompatible type - # "Optional[Collection[Any]]"; expected "Optional[Index]" - # [arg-type] - # pandas/core/frame.py:608: error: Argument 4 to - # "nested_data_to_arrays" has incompatible type - # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; - # expected "Union[dtype[Any], ExtensionDtype, None]" [arg-type] + # error: Argument 2 to "nested_data_to_arrays" has incompatible + # type "Optional[Collection[Any]]"; expected "Optional[Index]" + # error: Argument 3 to "nested_data_to_arrays" has incompatible + # type "Optional[Collection[Any]]"; expected "Optional[Index]" + # error: Argument 4 to "nested_data_to_arrays" has incompatible + # type "Union[ExtensionDtype, str, dtype[Any], Type[object], 
+ # None]"; expected "Union[dtype[Any], ExtensionDtype, None]" data, columns, # type: ignore[arg-type] index, # type: ignore[arg-type] dtype, # type: ignore[arg-type] ) - # pandas/core/frame.py:610: error: Argument "dtype" to - # "arrays_to_mgr" has incompatible type "Union[ExtensionDtype, str, - # dtype[Any], Type[object], None]"; expected "Union[dtype[Any], - # ExtensionDtype, None]" [arg-type] + # error: Argument "dtype" to "arrays_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; + # expected "Union[dtype[Any], ExtensionDtype, None]" mgr = arrays_to_mgr( arrays, columns, @@ -679,14 +669,12 @@ def __init__( ] mgr = arrays_to_mgr(values, columns, index, columns, dtype=None) else: - # pandas/core/frame.py:653: error: Incompatible types in assignment - # (expression has type "ndarray", variable has type - # "List[ExtensionArray]") [assignment] + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "List[ExtensionArray]") values = construct_2d_arraylike_from_scalar( # type: ignore[assignment] - # pandas/core/frame.py:654: error: Argument 4 to - # "construct_2d_arraylike_from_scalar" has incompatible type - # "Union[ExtensionDtype, str, dtype[Any], Type[object]]"; expected - # "dtype[Any]" [arg-type] + # error: Argument 4 to "construct_2d_arraylike_from_scalar" has + # incompatible type "Union[ExtensionDtype, str, dtype[Any], + # Type[object]]"; expected "dtype[Any]" data, len(index), len(columns), @@ -1278,8 +1266,8 @@ def dot(self, other: Series) -> Series: def dot(self, other: Union[DataFrame, Index, ArrayLike]) -> DataFrame: ... - # pandas/core/frame.py:1216: error: Overloaded function implementation cannot - # satisfy signature 2 due to inconsistencies in how they use type variables [misc] + # error: Overloaded function implementation cannot satisfy signature 2 due to + # inconsistencies in how they use type variables def dot( # type: ignore[misc] self, other: Union[AnyArrayLike, FrameOrSeriesUnion] ) -> FrameOrSeriesUnion: @@ -2130,8 +2118,8 @@ def to_records( # array of tuples to numpy cols. 
copy copy copy ix_vals = list(map(np.array, zip(*self.index._values))) else: - # pandas/core/frame.py:2059: error: List item 0 has incompatible type - # "ArrayLike"; expected "ndarray" [list-item] + # error: List item 0 has incompatible type "ArrayLike"; expected + # "ndarray" ix_vals = [self.index.values] # type: ignore[list-item] arrays = ix_vals + [ @@ -3391,8 +3379,8 @@ def _set_item_frame_value(self, key, value: DataFrame) -> None: # now align rows - # pandas/core/frame.py:3393: error: Incompatible types in assignment (expression - # has type "ExtensionArray", variable has type "DataFrame") [assignment] + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "DataFrame") value = _reindex_for_setitem(value, self.index) # type: ignore[assignment] self._set_item_mgr(key, value) @@ -3902,14 +3890,13 @@ def check_int_infer_dtype(dtypes): # see https://github.com/numpy/numpy/issues/9464 if (isinstance(dtype, str) and dtype == "int") or (dtype is int): converted_dtypes.append(np.int32) - # pandas/core/frame.py:3902: error: Argument 1 to "append" of "list" - # has incompatible type "Type[signedinteger[Any]]"; expected - # "Type[signedinteger[Any]]" [arg-type] + # error: Argument 1 to "append" of "list" has incompatible type + # "Type[signedinteger[Any]]"; expected "Type[signedinteger[Any]]" converted_dtypes.append(np.int64) # type: ignore[arg-type] else: - # pandas/core/frame.py:3904: error: Argument 1 to "append" of "list" - # has incompatible type "Union[dtype[Any], ExtensionDtype]"; - # expected "Type[signedinteger[Any]]" [arg-type] + # error: Argument 1 to "append" of "list" has incompatible type + # "Union[dtype[Any], ExtensionDtype]"; expected + # "Type[signedinteger[Any]]" converted_dtypes.append( infer_dtype_from_object(dtype) # type: ignore[arg-type] ) @@ -4272,8 +4259,8 @@ def _reindex_multi(self, axes, copy: bool, fill_value) -> DataFrame: if row_indexer is not None and col_indexer is not None: indexer = row_indexer, col_indexer - # pandas/core/frame.py:4265: error: Argument 2 to "take_2d_multi" has - # incompatible type "Tuple[Any, Any]"; expected "ndarray" [arg-type] + # error: Argument 2 to "take_2d_multi" has incompatible type "Tuple[Any, + # Any]"; expected "ndarray" new_values = algorithms.take_2d_multi( self.values, indexer, fill_value=fill_value # type: ignore[arg-type] ) @@ -4960,14 +4947,13 @@ def set_index( arrays.append(col) # type:ignore[arg-type] names.append(col.name) elif isinstance(col, (list, np.ndarray)): - # pandas/core/frame.py:4950: error: Argument 1 to "append" of "list" has - # incompatible type "Union[List[Any], ndarray]"; expected "Index" - # [arg-type] + # error: Argument 1 to "append" of "list" has incompatible type + # "Union[List[Any], ndarray]"; expected "Index" arrays.append(col) # type: ignore[arg-type] names.append(None) elif isinstance(col, abc.Iterator): - # pandas/core/frame.py:4953: error: Argument 1 to "append" of "list" has - # incompatible type "List[Any]"; expected "Index" [arg-type] + # error: Argument 1 to "append" of "list" has incompatible type + # "List[Any]"; expected "Index" arrays.append(list(col)) # type: ignore[arg-type] names.append(None) # from here, col can only be a column label @@ -9859,8 +9845,8 @@ def _reindex_for_setitem(value: FrameOrSeriesUnion, index: Index) -> ArrayLike: # reindex if necessary if value.index.equals(index) or not len(index): - # pandas/core/frame.py:9718: error: Incompatible return value type (got - # "Union[ndarray, Any]", expected "ExtensionArray") 
[return-value] + # error: Incompatible return value type (got "Union[ndarray, Any]", expected + # "ExtensionArray") return value._values.copy() # type: ignore[return-value] # GH#4107 diff --git a/pandas/core/generic.py b/pandas/core/generic.py index 12f1b0fb89d6d..cc31c3c0e7339 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -1928,8 +1928,8 @@ def __array_wrap__( def __array_ufunc__( self, ufunc: Callable, method: str, *inputs: Any, **kwargs: Any ): - # pandas/core/generic.py:1931: error: Argument 2 to "array_ufunc" has - # incompatible type "Callable[..., Any]"; expected "ufunc" [arg-type] + # error: Argument 2 to "array_ufunc" has incompatible type "Callable[..., Any]"; + # expected "ufunc" return arraylike.array_ufunc( self, ufunc, method, *inputs, **kwargs # type: ignore[arg-type] ) @@ -6923,8 +6923,7 @@ def interpolate( f"`limit_direction` must be 'backward' for method `{method}`" ) - # pandas\core\generic.py:7073: error: Value of type variable - # "_DTypeScalar" of "dtype" cannot be "object" [type-var] + # error: Value of type variable "_DTypeScalar" of "dtype" cannot be "object" if obj.ndim == 2 and np.all( obj.dtypes == np.dtype(object) # type: ignore[type-var] ): @@ -8305,8 +8304,7 @@ def last(self: FrameOrSeries, offset) -> FrameOrSeries: start_date = self.index[-1] - offset start = self.index.searchsorted(start_date, side="right") - # pandas/core/generic.py:8502: error: Slice index must be an integer or None - # [misc] + # error: Slice index must be an integer or None return self.iloc[start:] # type: ignore[misc] @final @@ -8898,9 +8896,8 @@ def _where( # we are the same shape, so create an actual object for alignment else: - # pandas\core\generic.py:8963: error: Argument 1 to "NDFrame" - # has incompatible type "ndarray"; expected "BlockManager" - # [arg-type] + # error: Argument 1 to "NDFrame" has incompatible type "ndarray"; + # expected "BlockManager" other = self._constructor( other, **self._construct_axes_dict() # type: ignore[arg-type] ) @@ -9832,10 +9829,10 @@ def abs(self: FrameOrSeries) -> FrameOrSeries: 2 6 30 -30 3 7 40 -50 """ - # pandas/core/generic.py:9851: error: Argument 1 to "__call__" of "ufunc" has - # incompatible type "FrameOrSeries"; expected "Union[Union[int, float, complex, - # str, bytes, generic], Sequence[Union[int, float, complex, str, bytes, - # generic]], Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + # error: Argument 1 to "__call__" of "ufunc" has incompatible type + # "FrameOrSeries"; expected "Union[Union[int, float, complex, str, bytes, + # generic], Sequence[Union[int, float, complex, str, bytes, generic]], + # Sequence[Sequence[Any]], _SupportsArray]" return np.abs(self) # type: ignore[arg-type] @final diff --git a/pandas/core/missing.py b/pandas/core/missing.py index de340a8d2a31c..ef8433f0b8e63 100644 --- a/pandas/core/missing.py +++ b/pandas/core/missing.py @@ -43,10 +43,10 @@ def mask_missing(arr: ArrayLike, values_to_mask) -> np.ndarray: # known to be holdable by arr. 
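Illustrative sketch, not part of the applied diff (the frame contents are made up): the generic.py hunks above annotate the ufunc path; calling a numpy ufunc such as np.abs on a DataFrame dispatches through __array_ufunc__ and returns a pandas object, so only the argument type in the numpy stubs needs the ignore, not the runtime behaviour:

import numpy as np
import pandas as pd

df = pd.DataFrame({"a": [-1, 2], "b": [3, -4]})

# The ufunc dispatches through DataFrame.__array_ufunc__ and hands back
# a DataFrame rather than a bare ndarray.
result = np.abs(df)
print(type(result).__name__, result["b"].tolist())  # DataFrame [3, 4]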
# When called from Series._single_replace, values_to_mask is tuple or list dtype, values_to_mask = infer_dtype_from(values_to_mask) - # pandas/core/missing.py:44: error: Argument "dtype" to "array" has incompatible - # type "Union[dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], None, type, - # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], - # List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + # error: Argument "dtype" to "array" has incompatible type "Union[dtype[Any], + # ExtensionDtype]"; expected "Union[dtype[Any], None, type, _SupportsDType, str, + # Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], List[Any], + # _DTypeDict, Tuple[Any, Any]]]" values_to_mask = np.array(values_to_mask, dtype=dtype) # type: ignore[arg-type] na_mask = isna(values_to_mask) @@ -278,10 +278,10 @@ def interpolate_1d( if method in NP_METHODS: # np.interp requires sorted X values, #21037 - # pandas/core/missing.py:274: error: Argument 1 to "argsort" has incompatible - # type "Union[ExtensionArray, Any]"; expected "Union[Union[int, float, complex, - # str, bytes, generic], Sequence[Union[int, float, complex, str, bytes, - # generic]], Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + # error: Argument 1 to "argsort" has incompatible type "Union[ExtensionArray, + # Any]"; expected "Union[Union[int, float, complex, str, bytes, generic], + # Sequence[Union[int, float, complex, str, bytes, generic]], + # Sequence[Sequence[Any]], _SupportsArray]" indexer = np.argsort(inds[valid]) # type: ignore[arg-type] result[invalid] = np.interp( inds[invalid], inds[valid][indexer], yvalues[valid][indexer] @@ -812,8 +812,7 @@ def _rolling_window(a: np.ndarray, window: int): # https://stackoverflow.com/a/6811241 shape = a.shape[:-1] + (a.shape[-1] - window + 1, window) strides = a.strides + (a.strides[-1],) - # pandas\core\missing.py:756: error: Module has no attribute - # "stride_tricks" [attr-defined] + # error: Module has no attribute "stride_tricks" return np.lib.stride_tricks.as_strided( # type: ignore[attr-defined] a, shape=shape, strides=strides ) diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index b7eac25e5a70e..17c99c23f4a90 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -388,9 +388,8 @@ def new_func( if datetimelike: result = _wrap_results(result, orig_values.dtype, fill_value=iNaT) if not skipna: - # pandas\core\nanops.py:400: error: Argument 3 to - # "_mask_datetimelike_result" has incompatible type - # "Optional[ndarray]"; expected "ndarray" [arg-type] + # error: Argument 3 to "_mask_datetimelike_result" has incompatible type + # "Optional[ndarray]"; expected "ndarray" result = _mask_datetimelike_result( result, axis, mask, orig_values # type: ignore[arg-type] ) @@ -468,8 +467,8 @@ def nanany( False """ values, _, _, _, _ = _get_values(values, skipna, fill_value=False, mask=mask) - # pandas\core\nanops.py:443: error: Incompatible return value type (got - # "Union[bool_, ndarray]", expected "bool") [return-value] + # error: Incompatible return value type (got "Union[bool_, ndarray]", expected + # "bool") return values.any(axis) # type: ignore[return-value] @@ -508,8 +507,8 @@ def nanall( False """ values, _, _, _, _ = _get_values(values, skipna, fill_value=True, mask=mask) - # pandas\core\nanops.py:480: error: Incompatible return value type (got - # "Union[bool_, ndarray]", expected "bool") [return-value] + # error: Incompatible return value type (got "Union[bool_, ndarray]", expected + # "bool") return values.all(axis) 
# type: ignore[return-value] @@ -558,19 +557,16 @@ def nansum( dtype_sum = np.float64 # type: ignore[assignment] the_sum = values.sum(axis, dtype=dtype_sum) - # pandas\core\nanops.py:525: error: Incompatible types in assignment - # (expression has type "float", variable has type "Union[number, ndarray]") - # [assignment] - - # pandas\core\nanops.py:525: error: Argument 1 to "_maybe_null_out" has - # incompatible type "Union[number, ndarray]"; expected "ndarray" - # [arg-type] + # error: Incompatible types in assignment (expression has type "float", variable has + # type "Union[number, ndarray]") + # error: Argument 1 to "_maybe_null_out" has incompatible type "Union[number, + # ndarray]"; expected "ndarray" the_sum = _maybe_null_out( # type: ignore[assignment] the_sum, axis, mask, values.shape, min_count=min_count # type: ignore[arg-type] ) - # pandas\core\nanops.py:553: error: Incompatible return value type (got - # "Union[number, ndarray]", expected "float") [return-value] + # error: Incompatible return value type (got "Union[number, ndarray]", expected + # "float") return the_sum # type: ignore[return-value] @@ -633,20 +629,17 @@ def nanmean( # not using needs_i8_conversion because that includes period if dtype.kind in ["m", "M"]: - # pandas\core\nanops.py:619: error: Incompatible types in assignment - # (expression has type "Type[float64]", variable has type "dtype[Any]") - # [assignment] + # error: Incompatible types in assignment (expression has type "Type[float64]", + # variable has type "dtype[Any]") dtype_sum = np.float64 # type: ignore[assignment] elif is_integer_dtype(dtype): - # pandas\core\nanops.py:621: error: Incompatible types in assignment - # (expression has type "Type[float64]", variable has type "dtype[Any]") - # [assignment] + # error: Incompatible types in assignment (expression has type "Type[float64]", + # variable has type "dtype[Any]") dtype_sum = np.float64 # type: ignore[assignment] elif is_float_dtype(dtype): dtype_sum = dtype - # pandas\core\nanops.py:624: error: Incompatible types in assignment - # (expression has type "dtype[Any]", variable has type "Type[float64]") - # [assignment] + # error: Incompatible types in assignment (expression has type "dtype[Any]", + # variable has type "Type[float64]") dtype_count = dtype # type: ignore[assignment] count = _get_counts(values.shape, mask, axis, dtype=dtype_count) @@ -799,13 +792,8 @@ def _get_counts_nanvar( """ dtype = get_dtype(dtype) count = _get_counts(value_counts, mask, axis, dtype=dtype) - # pandas\core\nanops.py:702: error: Unsupported operand types for - ("int" - # and "generic") [operator] - - # pandas\core\nanops.py:702: error: Unsupported operand types for - - # ("float" and "generic") [operator] - - # pandas\core\nanops.py:702: note: Both left and right operands are unions + # error: Unsupported operand types for - ("int" and "generic") + # error: Unsupported operand types for - ("float" and "generic") d = count - dtype.type(ddof) # type: ignore[operator] # always return NaN, never inf @@ -1062,8 +1050,7 @@ def nanargmax( array([2, 2, 1, 1], dtype=int64) """ values, mask, _, _, _ = _get_values(values, True, fill_value_typ="-inf", mask=mask) - # pandas\core\nanops.py:971: error: Need type annotation for 'result' - # [var-annotated] + # error: Need type annotation for 'result' result = values.argmax(axis) # type: ignore[var-annotated] result = _maybe_arg_null_out(result, axis, mask, skipna) return result @@ -1109,8 +1096,7 @@ def nanargmin( array([0, 0, 1, 1], dtype=int64) """ values, mask, _, _, _ = 
_get_values(values, True, fill_value_typ="+inf", mask=mask) - # pandas\core\nanops.py:1015: error: Need type annotation for 'result' - # [var-annotated] + # error: Need type annotation for 'result' result = values.argmin(axis) # type: ignore[var-annotated] result = _maybe_arg_null_out(result, axis, mask, skipna) return result @@ -1332,9 +1318,8 @@ def nanprod( values = values.copy() values[mask] = 1 result = values.prod(axis) - # pandas\core\nanops.py:1321: error: Argument 1 to "_maybe_null_out" has - # incompatible type "Union[number, ndarray]"; expected "ndarray" - # [arg-type] + # error: Argument 1 to "_maybe_null_out" has incompatible type "Union[number, + # ndarray]"; expected "ndarray" return _maybe_null_out( result, axis, mask, values.shape, min_count=min_count # type: ignore[arg-type] ) @@ -1412,15 +1397,12 @@ def _get_counts( # expected "Union[int, float, ndarray]") return dtype.type(count) # type: ignore[return-value] try: - # pandas\core\nanops.py:1396: error: Incompatible return value type - # (got "Union[ndarray, generic]", expected "Union[int, float, - # ndarray]") [return-value] - - # pandas\core\nanops.py:1396: error: Argument 1 to "astype" of - # "_ArrayOrScalarCommon" has incompatible type "Union[ExtensionDtype, - # dtype]"; expected "Union[dtype, None, type, _SupportsDtype, str, - # Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], List[Any], - # _DtypeDict, Tuple[Any, Any]]" [arg-type] + # error: Incompatible return value type (got "Union[ndarray, generic]", expected + # "Union[int, float, ndarray]") + # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has incompatible type + # "Union[ExtensionDtype, dtype]"; expected "Union[dtype, None, type, + # _SupportsDtype, str, Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DtypeDict, Tuple[Any, Any]]" return count.astype(dtype) # type: ignore[return-value,arg-type] except AttributeError: # error: Argument "dtype" to "array" has incompatible type diff --git a/pandas/core/series.py b/pandas/core/series.py index 587448e866d11..35fcf3c81e1c6 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -316,8 +316,8 @@ def __init__( copy = False elif isinstance(data, np.ndarray): - # pandas\core\series.py:267: error: Argument 1 to "len" has - # incompatible type "dtype"; expected "Sized" [arg-type] + # error: Argument 1 to "len" has incompatible type "dtype"; expected + # "Sized" if len(data.dtype): # type: ignore[arg-type] # GH#13296 we are dealing with a compound dtype, which # should be treated as 2D @@ -369,10 +369,9 @@ def __init__( elif copy: data = data.copy() else: - # pandas/core/series.py:342: error: Argument 3 to "sanitize_array" has - # incompatible type "Union[ExtensionDtype, str, dtype[Any], - # Type[object], None]"; expected "Union[dtype[Any], ExtensionDtype, - # None]" [arg-type] + # error: Argument 3 to "sanitize_array" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" data = sanitize_array( data, index, @@ -427,9 +426,8 @@ def _init_dict(self, data, index=None, dtype: Optional[Dtype] = None): # TODO: passing np.float64 to not break anything yet. 
See GH-17261 - # pandas\core\series.py:375: error: Value of type variable "ArrayLike" - # of "create_series_with_explicit_dtype" cannot be "Tuple[Any, ...]" - # [type-var] + # error: Value of type variable "ArrayLike" of + # "create_series_with_explicit_dtype" cannot be "Tuple[Any, ...]" s = create_series_with_explicit_dtype( # type: ignore[type-var] values, index=keys, dtype=dtype, dtype_if_empty=np.float64 ) @@ -1029,9 +1027,8 @@ def __setitem__(self, key, value): def _set_with_engine(self, key, value): # fails with AttributeError for IntervalIndex loc = self.index._engine.get_loc(key) - # pandas\core\series.py:1050: error: Argument 1 to - # "validate_numeric_casting" has incompatible type "Union[dtype, - # ExtensionDtype]"; expected "dtype" [arg-type] + # error: Argument 1 to "validate_numeric_casting" has incompatible type + # "Union[dtype, ExtensionDtype]"; expected "dtype" validate_numeric_casting(self.dtype, value) # type: ignore[arg-type] self._values[loc] = value @@ -2965,9 +2962,8 @@ def combine(self, other, func, fill_value=None) -> Series: # The function can return something of any type, so check # if the type is compatible with the calling EA. - # pandas\core\series.py:2978: error: Value of type variable - # "ArrayLike" of "maybe_cast_to_extension_array" cannot be - # "List[Any]" [type-var] + # error: Value of type variable "ArrayLike" of + # "maybe_cast_to_extension_array" cannot be "List[Any]" new_values = maybe_cast_to_extension_array( type(self._values), new_values # type: ignore[type-var] ) From 765687e2908e43e40ba5f0a17fb04311e51a1003 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Tue, 16 Feb 2021 15:10:46 +0000 Subject: [PATCH 59/86] tidy comments (wip) --- pandas/core/arrays/masked.py | 28 ++-- pandas/core/arrays/numpy_.py | 23 ++-- pandas/core/arrays/period.py | 6 +- pandas/core/arrays/sparse/array.py | 116 +++++++--------- pandas/core/arrays/sparse/dtype.py | 3 +- pandas/core/arrays/string_.py | 15 +-- pandas/core/arrays/string_arrow.py | 32 ++--- pandas/core/dtypes/cast.py | 196 +++++++++++----------------- pandas/core/dtypes/common.py | 14 +- pandas/core/dtypes/concat.py | 9 +- pandas/core/dtypes/missing.py | 12 +- pandas/core/groupby/generic.py | 25 ++-- pandas/core/groupby/groupby.py | 5 +- pandas/core/groupby/ops.py | 23 ++-- pandas/core/indexes/base.py | 118 ++++++++--------- pandas/core/indexes/category.py | 10 +- pandas/core/indexes/datetimelike.py | 10 +- pandas/core/indexes/datetimes.py | 5 +- pandas/core/indexes/extension.py | 4 +- pandas/core/indexes/interval.py | 10 +- pandas/core/indexes/multi.py | 28 ++-- pandas/core/indexes/numeric.py | 4 +- 22 files changed, 284 insertions(+), 412 deletions(-) diff --git a/pandas/core/arrays/masked.py b/pandas/core/arrays/masked.py index 5ee729182499b..60bc6f09b047d 100644 --- a/pandas/core/arrays/masked.py +++ b/pandas/core/arrays/masked.py @@ -219,9 +219,8 @@ def to_numpy( # type: ignore[override] if na_value is lib.no_default: na_value = libmissing.NA if dtype is None: - # pandas/core/arrays/masked.py:218: error: Incompatible types in assignment - # (expression has type "Type[object]", variable has type "Union[str, - # dtype[Any], None]") [assignment] + # error: Incompatible types in assignment (expression has type + # "Type[object]", variable has type "Union[str, dtype[Any], None]") dtype = object # type: ignore[assignment] if self._hasna: if ( @@ -246,11 +245,11 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: if is_dtype_equal(dtype, self.dtype): if copy: - # 
pandas/core/arrays/masked.py:242: error: Incompatible return value - # type (got "BaseMaskedArray", expected "ndarray") [return-value] + # error: Incompatible return value type (got "BaseMaskedArray", expected + # "ndarray") return self.copy() # type: ignore[return-value] - # pandas/core/arrays/masked.py:243: error: Incompatible return value type - # (got "BaseMaskedArray", expected "ndarray") [return-value] + # error: Incompatible return value type (got "BaseMaskedArray", expected + # "ndarray") return self # type: ignore[return-value] # if we are astyping to another nullable masked dtype, we can fastpath @@ -261,8 +260,8 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike: # not directly depending on the `copy` keyword mask = self._mask if data is self._data else self._mask.copy() cls = dtype.construct_array_type() - # pandas/core/arrays/masked.py:253: error: Incompatible return value type - # (got "BaseMaskedArray", expected "ndarray") [return-value] + # error: Incompatible return value type (got "BaseMaskedArray", expected + # "ndarray") return cls(data, mask, copy=False) # type: ignore[return-value] if isinstance(dtype, ExtensionDtype): @@ -294,8 +293,7 @@ def _hasna(self) -> bool: # make this faster by having an optional mask, but not have to change # source code using it.. - # pandas\core\arrays\masked.py:249: error: Incompatible return value - # type (got "bool_", expected "bool") [return-value] + # error: Incompatible return value type (got "bool_", expected "bool") return self._mask.any() # type: ignore[return-value] # error: Return type "ndarray" of "isna" incompatible with return type @@ -346,8 +344,8 @@ def take( return type(self)(result, mask, copy=False) - # pandas/core/arrays/masked.py:356: error: Return type "BooleanArray" of "isin" - # incompatible with return type "ndarray" in supertype "ExtensionArray" [override] + # error: Return type "BooleanArray" of "isin" incompatible with return type + # "ndarray" in supertype "ExtensionArray" def isin(self, values) -> BooleanArray: # type: ignore[override] from pandas.core.arrays import BooleanArray @@ -358,8 +356,8 @@ def isin(self, values) -> BooleanArray: # type: ignore[override] result += self._mask else: result *= np.invert(self._mask) - # pandas/core/arrays/masked.py:366: error: No overload variant of "zeros_like" - # matches argument types "BaseMaskedArray", "Type[bool]" [call-overload] + # error: No overload variant of "zeros_like" matches argument types + # "BaseMaskedArray", "Type[bool]" mask = np.zeros_like(self, dtype=bool) # type: ignore[call-overload] return BooleanArray(result, mask, copy=False) diff --git a/pandas/core/arrays/numpy_.py b/pandas/core/arrays/numpy_.py index fcb6844b9f0f5..c2b92d66f7815 100644 --- a/pandas/core/arrays/numpy_.py +++ b/pandas/core/arrays/numpy_.py @@ -85,11 +85,11 @@ def _from_sequence( if isinstance(dtype, PandasDtype): dtype = dtype._dtype - # pandas/core/arrays/numpy_.py:181: error: Argument "dtype" to "asarray" has - # incompatible type "Union[ExtensionDtype, str, dtype[Any], - # dtype[floating[_64Bit]], Type[object], None]"; expected "Union[dtype[Any], - # None, type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, - # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + # error: Argument "dtype" to "asarray" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], dtype[floating[_64Bit]], Type[object], + # None]"; expected "Union[dtype[Any], None, type, _SupportsDType, str, + # Union[Tuple[Any, int], Tuple[Any, 
Union[int, Sequence[int]]], List[Any], + # _DTypeDict, Tuple[Any, Any]]]" result = np.asarray(scalars, dtype=dtype) # type: ignore[arg-type] if copy and result is scalars: result = result.copy() @@ -328,16 +328,9 @@ def skew( # ------------------------------------------------------------------------ # Additional Methods - # pandas/core/arrays/numpy_.py:419: error: Argument 1 of "to_numpy" is incompatible - # with supertype "ExtensionArray"; supertype defines the argument type as - # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], - # Type[complex], Type[bool], Type[object], None]" [override] - - # pandas/core/arrays/numpy_.py:419: note: This violates the Liskov substitution - # principle - - # pandas/core/arrays/numpy_.py:419: note: See - # https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides + # error: Argument 1 of "to_numpy" is incompatible with supertype "ExtensionArray"; + # supertype defines the argument type as "Union[ExtensionDtype, str, dtype[Any], + # Type[str], Type[float], Type[int], Type[complex], Type[bool], Type[object], None]" def to_numpy( # type: ignore[override] self, dtype: Optional[NpDtype] = None, diff --git a/pandas/core/arrays/period.py b/pandas/core/arrays/period.py index 774ff9401e131..ba1be8177d40e 100644 --- a/pandas/core/arrays/period.py +++ b/pandas/core/arrays/period.py @@ -1094,11 +1094,9 @@ def _make_field_arrays(*fields): elif length is None: length = len(x) - # pandas\core\arrays\period.py:1101: error: Argument 2 to "repeat" has - # incompatible type "Optional[int]"; expected "Union[Union[int, - # integer[Any]], Union[bool, bool_], ndarray, Sequence[Union[int, + # error: Argument 2 to "repeat" has incompatible type "Optional[int]"; expected + # "Union[Union[int, integer[Any]], Union[bool, bool_], ndarray, Sequence[Union[int, # integer[Any]]], Sequence[Union[bool, bool_]], Sequence[Sequence[Any]]]" - # [arg-type] return [ np.asarray(x) if isinstance(x, (np.ndarray, list, ABCSeries)) diff --git a/pandas/core/arrays/sparse/array.py b/pandas/core/arrays/sparse/array.py index 2ec8ddb3fea5c..16bf3a81abdbc 100644 --- a/pandas/core/arrays/sparse/array.py +++ b/pandas/core/arrays/sparse/array.py @@ -340,11 +340,10 @@ def __init__( if data is None: # TODO: What should the empty dtype be? Object or float? 
- # pandas/core/arrays/sparse/array.py:340: error: Argument "dtype" to "array" - # has incompatible type "Union[ExtensionDtype, dtype[Any], None]"; expected - # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], - # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, - # Any]]]" [arg-type] + # error: Argument "dtype" to "array" has incompatible type + # "Union[ExtensionDtype, dtype[Any], None]"; expected "Union[dtype[Any], + # None, type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, + # Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" data = np.array([], dtype=dtype) # type: ignore[arg-type] if not is_array_like(data): @@ -374,11 +373,11 @@ def __init__( if isinstance(data, type(self)) and sparse_index is None: sparse_index = data._sparse_index - # pandas/core/arrays/sparse/array.py:369: error: Argument "dtype" to - # "asarray" has incompatible type "Union[ExtensionDtype, dtype[Any], - # Type[object], None]"; expected "Union[dtype[Any], None, type, - # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, - # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + # error: Argument "dtype" to "asarray" has incompatible type + # "Union[ExtensionDtype, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" sparse_values = np.asarray( data.sp_values, dtype=dtype # type: ignore[arg-type] ) @@ -399,21 +398,20 @@ def __init__( fill_value = np.datetime64("NaT", "ns") data = np.asarray(data) sparse_values, sparse_index, fill_value = make_sparse( - # pandas/core/arrays/sparse/array.py:385: error: Argument "dtype" to - # "make_sparse" has incompatible type "Union[ExtensionDtype, dtype[Any], - # Type[object], None]"; expected "Union[str, dtype[Any], None]" - # [arg-type] + # error: Argument "dtype" to "make_sparse" has incompatible type + # "Union[ExtensionDtype, dtype[Any], Type[object], None]"; expected + # "Union[str, dtype[Any], None]" data, kind=kind, fill_value=fill_value, dtype=dtype, # type: ignore[arg-type] ) else: - # pandas/core/arrays/sparse/array.py:388: error: Argument "dtype" to - # "asarray" has incompatible type "Union[ExtensionDtype, dtype[Any], - # Type[object], None]"; expected "Union[dtype[Any], None, type, - # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, - # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + # error: Argument "dtype" to "asarray" has incompatible type + # "Union[ExtensionDtype, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" sparse_values = np.asarray(data, dtype=dtype) # type: ignore[arg-type] if len(sparse_values) != sparse_index.npoints: raise AssertionError( @@ -502,9 +500,8 @@ def __array__(self, dtype: Optional[NpDtype] = None) -> np.ndarray: try: dtype = np.result_type(self.sp_values.dtype, type(fill_value)) except TypeError: - # pandas/core/arrays/sparse/array.py:476: error: Incompatible types in - # assignment (expression has type "Type[object]", variable has type - # "Union[str, dtype[Any], None]") [assignment] + # error: Incompatible types in assignment (expression has type + # "Type[object]", variable has type "Union[str, dtype[Any], None]") dtype = object # type: ignore[assignment] out = 
np.full(self.shape, fill_value, dtype=dtype) @@ -750,9 +747,8 @@ def factorize(self, na_sentinel=-1): # Given that we have to return a dense array of codes, why bother # implementing an efficient factorize? codes, uniques = algos.factorize(np.asarray(self), na_sentinel=na_sentinel) - # pandas/core/arrays/sparse/array.py:722: error: Incompatible types in - # assignment (expression has type "SparseArray", variable has type - # "Union[ndarray, Index]") [assignment] + # error: Incompatible types in assignment (expression has type "SparseArray", + # variable has type "Union[ndarray, Index]") uniques = SparseArray(uniques, dtype=self.dtype) # type: ignore[assignment] return codes, uniques @@ -1098,48 +1094,35 @@ def astype(self, dtype: Optional[Dtype] = None, copy=True): else: return self.copy() dtype = self.dtype.update_dtype(dtype) - # pandas/core/arrays/sparse/array.py:1069: error: Item "ExtensionDtype" of - # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], - # Type[complex], Type[bool], Type[object], None]" has no attribute - # "_subtype_with_str" [union-attr] - - # pandas/core/arrays/sparse/array.py:1069: error: Item "str" of - # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], - # Type[complex], Type[bool], Type[object], None]" has no attribute - # "_subtype_with_str" [union-attr] - - # pandas/core/arrays/sparse/array.py:1069: error: Item "dtype[Any]" of - # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], - # Type[complex], Type[bool], Type[object], None]" has no attribute - # "_subtype_with_str" [union-attr] - - # pandas/core/arrays/sparse/array.py:1069: error: Item "ABCMeta" of - # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], - # Type[complex], Type[bool], Type[object], None]" has no attribute - # "_subtype_with_str" [union-attr] - - # pandas/core/arrays/sparse/array.py:1069: error: Item "type" of - # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], - # Type[complex], Type[bool], Type[object], None]" has no attribute - # "_subtype_with_str" [union-attr] - - # pandas/core/arrays/sparse/array.py:1069: error: Item "None" of - # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], - # Type[complex], Type[bool], Type[object], None]" has no attribute - # "_subtype_with_str" [union-attr] + # error: Item "ExtensionDtype" of "Union[ExtensionDtype, str, dtype[Any], + # Type[str], Type[float], Type[int], Type[complex], Type[bool], Type[object], + # None]" has no attribute "_subtype_with_str" + # error: Item "str" of "Union[ExtensionDtype, str, dtype[Any], Type[str], + # Type[float], Type[int], Type[complex], Type[bool], Type[object], None]" has no + # attribute "_subtype_with_str" + # error: Item "dtype[Any]" of "Union[ExtensionDtype, str, dtype[Any], Type[str], + # Type[float], Type[int], Type[complex], Type[bool], Type[object], None]" has no + # attribute "_subtype_with_str" + # error: Item "ABCMeta" of "Union[ExtensionDtype, str, dtype[Any], Type[str], + # Type[float], Type[int], Type[complex], Type[bool], Type[object], None]" has no + # attribute "_subtype_with_str" + # error: Item "type" of "Union[ExtensionDtype, str, dtype[Any], Type[str], + # Type[float], Type[int], Type[complex], Type[bool], Type[object], None]" has no + # attribute "_subtype_with_str" + # error: Item "None" of "Union[ExtensionDtype, str, dtype[Any], Type[str], + # Type[float], Type[int], Type[complex], Type[bool], Type[object], None]" has no + # attribute 
"_subtype_with_str" subtype = pandas_dtype(dtype._subtype_with_str) # type: ignore[union-attr] # TODO copy=False is broken for astype_nansafe with int -> float, so cannot # passthrough copy keyword: https://github.com/pandas-dev/pandas/issues/34456 sp_values = astype_nansafe(self.sp_values, subtype, copy=True) - # pandas\core\arrays\sparse\array.py:1074: error: Non-overlapping - # identity check (left operand type: "ExtensionArray", right operand - # t...ype: "ndarray") [comparison-overlap] + # error: Non-overlapping identity check (left operand type: "ExtensionArray", + # right operand t...ype: "ndarray") if sp_values is self.sp_values and copy: # type: ignore[comparison-overlap] sp_values = sp_values.copy() - # pandas\core\arrays\sparse\array.py:1077: error: Argument 1 to - # "_simple_new" of "SparseArray" has incompatible type - # "ExtensionArray"; expected "ndarray" [arg-type] + # error: Argument 1 to "_simple_new" of "SparseArray" has incompatible type + # "ExtensionArray"; expected "ndarray" return self._simple_new( sp_values, self.sp_index, dtype # type: ignore[arg-type] ) @@ -1437,10 +1420,10 @@ def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): return type(self)(result) def __abs__(self): - # pandas/core/arrays/sparse/array.py:1379: error: Argument 1 to "__call__" of - # "ufunc" has incompatible type "SparseArray"; expected "Union[Union[int, float, - # complex, str, bytes, generic], Sequence[Union[int, float, complex, str, bytes, - # generic]], Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + # error: Argument 1 to "__call__" of "ufunc" has incompatible type + # "SparseArray"; expected "Union[Union[int, float, complex, str, bytes, + # generic], Sequence[Union[int, float, complex, str, bytes, generic]], + # Sequence[Sequence[Any]], _SupportsArray]" return np.abs(self) # type: ignore[arg-type] # ------------------------------------------------------------------------ @@ -1590,9 +1573,8 @@ def make_sparse( index = make_sparse_index(length, indices, kind) sparsified_values = arr[mask] if dtype is not None: - # pandas/core/arrays/sparse/array.py:1528: error: Argument "dtype" to - # "astype_nansafe" has incompatible type "Union[str, dtype[Any]]"; expected - # "Union[dtype[Any], ExtensionDtype]" [arg-type] + # error: Argument "dtype" to "astype_nansafe" has incompatible type "Union[str, + # dtype[Any]]"; expected "Union[dtype[Any], ExtensionDtype]" sparsified_values = astype_nansafe( sparsified_values, dtype=dtype # type: ignore[arg-type] ) diff --git a/pandas/core/arrays/sparse/dtype.py b/pandas/core/arrays/sparse/dtype.py index 4feec66448a3b..3e255caca9cb6 100644 --- a/pandas/core/arrays/sparse/dtype.py +++ b/pandas/core/arrays/sparse/dtype.py @@ -326,8 +326,7 @@ def update_dtype(self, dtype): if is_extension_array_dtype(dtype): raise TypeError("sparse arrays of extension dtypes not supported") - # pandas\core\arrays\sparse\dtype.py:328: error: "ExtensionArray" - # has no attribute "item" [attr-defined] + # error: "ExtensionArray" has no attribute "item" fill_value = astype_nansafe( np.array(self.fill_value), dtype ).item() # type: ignore[attr-defined] diff --git a/pandas/core/arrays/string_.py b/pandas/core/arrays/string_.py index 9153f241528f3..63a91fe33ca91 100644 --- a/pandas/core/arrays/string_.py +++ b/pandas/core/arrays/string_.py @@ -302,9 +302,8 @@ def astype(self, dtype, copy=True): values = arr.astype(dtype.numpy_dtype) return IntegerArray(values, mask, copy=False) elif isinstance(dtype, FloatingDtype): - # pandas/core/arrays/string_.py:299: error: 
Incompatible types in assignment - # (expression has type "StringArray", variable has type "ndarray") - # [assignment] + # error: Incompatible types in assignment (expression has type + # "StringArray", variable has type "ndarray") arr = self.copy() # type: ignore[assignment] mask = self.isna() arr[mask] = "0" @@ -415,11 +414,11 @@ def _str_map(self, f, na_value=None, dtype: Optional[Dtype] = None): mask.view("uint8"), convert=False, na_value=na_value, - # pandas/core/arrays/string_.py:417: error: Value of type variable - # "_DTypeScalar" of "dtype" cannot be "object" [type-var] - # pandas/core/arrays/string_.py:417: error: Argument 1 to "dtype" has - # incompatible type "Union[ExtensionDtype, str, dtype[Any], - # Type[object]]"; expected "Type[object]" [arg-type] + # error: Value of type variable "_DTypeScalar" of "dtype" cannot be + # "object" + # error: Argument 1 to "dtype" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object]]"; expected + # "Type[object]" dtype=np.dtype(dtype), # type: ignore[type-var,arg-type] ) diff --git a/pandas/core/arrays/string_arrow.py b/pandas/core/arrays/string_arrow.py index 4fdb2f5bc4857..eea8dec8b8d5c 100644 --- a/pandas/core/arrays/string_arrow.py +++ b/pandas/core/arrays/string_arrow.py @@ -230,11 +230,9 @@ def __arrow_array__(self, type=None): """Convert myself to a pyarrow Array or ChunkedArray.""" return self._data - # pandas/core/arrays/string_arrow.py:233: error: Argument 1 of "to_numpy" is - # incompatible with supertype "ExtensionArray"; supertype defines the - # argument type as "Union[ExtensionDtype, str, dtype[Any], Type[str], - # Type[float], Type[int], Type[complex], Type[bool], Type[object], None]" - # [override] + # error: Argument 1 of "to_numpy" is incompatible with supertype "ExtensionArray"; + # supertype defines the argument type as "Union[ExtensionDtype, str, dtype[Any], + # Type[str], Type[float], Type[int], Type[complex], Type[bool], Type[object], None]" def to_numpy( # type: ignore[override] self, dtype: Optional[NpDtype] = None, @@ -315,9 +313,8 @@ def __getitem__(self, item: Any) -> Any: if not len(item): return type(self)(pa.chunked_array([], type=pa.string())) elif is_integer_dtype(item.dtype): - # pandas/core/arrays/string_arrow.py:308: error: Argument 1 to "take" of - # "ArrowStringArray" has incompatible type "ndarray"; expected - # "Sequence[int]" [arg-type] + # error: Argument 1 to "take" of "ArrowStringArray" has incompatible + # type "ndarray"; expected "Sequence[int]" return self.take(item) # type: ignore[arg-type] elif is_bool_dtype(item.dtype): return type(self)(self._data.filter(item)) @@ -384,10 +381,8 @@ def fillna(self, value=None, method=None, limit=None): if mask.any(): if method is not None: func = get_fill_func(method) - # pandas/core/arrays/string_arrow.py:382: error: Argument 1 to - # "to_numpy" of "ArrowStringArray" has incompatible type - # "Type[object]"; expected "Union[str, dtype[Any], None]" - # [arg-type] + # error: Argument 1 to "to_numpy" of "ArrowStringArray" has incompatible + # type "Type[object]"; expected "Union[str, dtype[Any], None]" new_values = func( self.to_numpy(object), # type: ignore[arg-type] limit=limit, @@ -415,9 +410,8 @@ def nbytes(self) -> int: """ return self._data.nbytes - # pandas/core/arrays/string_arrow.py:397: error: Return type "ndarray" of "isna" - # incompatible with return type "ArrayLike" in supertype "ExtensionArray" - # [override] + # error: Return type "ndarray" of "isna" incompatible with return type "ArrayLike" + # in supertype 
"ExtensionArray" def isna(self) -> np.ndarray: # type: ignore[override] """ Boolean NumPy array indicating if each value is missing. @@ -493,8 +487,7 @@ def __setitem__(self, key: Union[int, np.ndarray], value: Any) -> None: # Slice data and insert in-between new_data = [ - # pandas/core/arrays/string_arrow.py:472: error: Slice index must be an - # integer or None [misc] + # error: Slice index must be an integer or None *self._data[0:key].chunks, # type: ignore[misc] pa.array([value], type=pa.string()), *self._data[(key + 1) :].chunks, @@ -586,9 +579,8 @@ def take( if not is_array_like(indices): indices_array = np.asanyarray(indices) else: - # pandas/core/arrays/string_arrow.py:563: error: Incompatible types in - # assignment (expression has type "Sequence[int]", variable has type - # "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type + # "Sequence[int]", variable has type "ndarray") indices_array = indices # type: ignore[assignment] if len(self._data) == 0 and (indices_array >= 0).any(): diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index 70ef29c76f3d3..d413ca7d23d42 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -236,8 +236,7 @@ def maybe_downcast_to_dtype(result, dtype: Union[str, np.dtype]): # e.g. TypeError: int() argument must be a string, a # bytes-like object or a number, not 'Period - # pandas\core\dtypes\cast.py:179: error: "dtype[Any]" has no - # attribute "freq" [attr-defined] + # error: "dtype[Any]" has no attribute "freq" return PeriodArray(result, freq=dtype.freq) # type: ignore[attr-defined] converted = maybe_downcast_numeric(result, dtype, do_round) @@ -363,20 +362,18 @@ def maybe_cast_result( # We have to special case categorical so as not to upcast # things like counts back to categorical - # pandas/core/dtypes/cast.py:337: error: Item "dtype[Any]" of "Union[dtype[Any], - # ExtensionDtype]" has no attribute "construct_array_type" [union-attr] + # error: Item "dtype[Any]" of "Union[dtype[Any], ExtensionDtype]" has no + # attribute "construct_array_type" cls = dtype.construct_array_type() # type: ignore[union-attr] - # pandas/core/dtypes/cast.py:338: error: Argument "dtype" to - # "maybe_cast_to_extension_array" has incompatible type "Union[dtype[Any], - # ExtensionDtype]"; expected "Optional[ExtensionDtype]" [arg-type] + # error: Argument "dtype" to "maybe_cast_to_extension_array" has incompatible + # type "Union[dtype[Any], ExtensionDtype]"; expected "Optional[ExtensionDtype]" result = maybe_cast_to_extension_array( cls, result, dtype=dtype # type: ignore[arg-type] ) elif numeric_only and is_numeric_dtype(dtype) or not numeric_only: - # pandas/core/dtypes/cast.py:341: error: Argument 2 to "maybe_downcast_to_dtype" - # has incompatible type "Union[dtype[Any], ExtensionDtype]"; expected - # "Union[str, dtype[Any]]" [arg-type] + # error: Argument 2 to "maybe_downcast_to_dtype" has incompatible type + # "Union[dtype[Any], ExtensionDtype]"; expected "Union[str, dtype[Any]]" result = maybe_downcast_to_dtype(result, dtype) # type: ignore[arg-type] return result @@ -496,11 +493,10 @@ def maybe_upcast_putmask(result: np.ndarray, mask: np.ndarray) -> np.ndarray: new_dtype = ensure_dtype_can_hold_na(result.dtype) if new_dtype != result.dtype: - # pandas/core/dtypes/cast.py:499: error: Argument 1 to "astype" of - # "_ArrayOrScalarCommon" has incompatible type "Union[dtype[Any], - # ExtensionDtype]"; expected "Union[dtype[Any], None, type, _SupportsDType, - # str, Union[Tuple[Any, int], 
Tuple[Any, Union[int, Sequence[int]]], - # List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has incompatible + # type "Union[dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], + # None, type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, + # Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" result = result.astype(new_dtype, copy=True) # type: ignore[arg-type] np.place(result, mask, np.nan) @@ -555,9 +551,8 @@ def maybe_promote(dtype: np.dtype, fill_value=np.nan): kinds = ["i", "u", "f", "c", "m", "M"] if is_valid_na_for_dtype(fill_value, dtype) and dtype.kind in kinds: - # pandas/core/dtypes/cast.py:553: error: Incompatible types in assignment - # (expression has type "Union[dtype[Any], ExtensionDtype]", variable has type - # "dtype[Any]") [assignment] + # error: Incompatible types in assignment (expression has type + # "Union[dtype[Any], ExtensionDtype]", variable has type "dtype[Any]") dtype = ensure_dtype_can_hold_na(dtype) # type: ignore[assignment] fv = na_value_for_dtype(dtype) return dtype, fv @@ -609,8 +604,7 @@ def maybe_promote(dtype: np.dtype, fill_value=np.nan): if fv.tz is None: return dtype, fv.asm8 - # pandas/core/dtypes/cast.py:612: error: Value of type variable "_DTypeScalar" - # of "dtype" cannot be "object" [type-var] + # error: Value of type variable "_DTypeScalar" of "dtype" cannot be "object" return np.dtype(object), fill_value # type: ignore[type-var] elif issubclass(dtype.type, np.timedelta64): @@ -878,9 +872,8 @@ def infer_dtype_from_array( (dtype('O'), [1, '1']) """ if isinstance(arr, np.ndarray): - # pandas\core\dtypes\cast.py:846: error: Incompatible return value type - # (got "Tuple[dtype, ndarray]", expected "Tuple[Union[dtype, - # ExtensionDtype], ExtensionArray]") [return-value] + # error: Incompatible return value type (got "Tuple[dtype, ndarray]", expected + # "Tuple[Union[dtype, ExtensionDtype], ExtensionArray]") return arr.dtype, arr # type: ignore[return-value] if not is_list_like(arr): @@ -890,9 +883,8 @@ def infer_dtype_from_array( return arr.dtype, arr elif isinstance(arr, ABCSeries): - # pandas\core\dtypes\cast.py:855: error: Incompatible return value type - # (got "Tuple[Any, ndarray]", expected "Tuple[Union[dtype, - # ExtensionDtype], ExtensionArray]") [return-value] + # error: Incompatible return value type (got "Tuple[Any, ndarray]", expected + # "Tuple[Union[dtype, ExtensionDtype], ExtensionArray]") return arr.dtype, np.asarray(arr) # type: ignore[return-value] # don't force numpy coerce with nan's @@ -973,11 +965,10 @@ def invalidate_string_dtypes(dtype_set: Set[DtypeObj]): Change string like dtypes to object for ``DataFrame.select_dtypes()``. 
""" - # pandas/core/dtypes/cast.py:949: error: Argument 1 to has incompatible type - # "Type[generic]"; expected "Union[dtype[Any], ExtensionDtype, None]" [arg-type] - - # pandas/core/dtypes/cast.py:949: error: Argument 2 to has incompatible type - # "Type[generic]"; expected "Union[dtype[Any], ExtensionDtype, None]" [arg-type] + # error: Argument 1 to has incompatible type "Type[generic]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + # error: Argument 2 to has incompatible type "Type[generic]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" non_string_dtypes = dtype_set - { np.dtype("S").type, # type: ignore[arg-type] np.dtype(" Union[DatetimeScalar, Any] - # pandas/core/dtypes/cast.py:1158: note: def to_datetime(arg: - # Series, errors: str = ..., dayfirst: bool = ..., yearfirst: bool = - # ..., utc: Optional[bool] = ..., format: Optional[str] = ..., exact: - # bool = ..., unit: Optional[str] = ..., infer_datetime_format: bool = - # ..., origin: Any = ..., cache: bool = ...) -> Series - # pandas/core/dtypes/cast.py:1158: note: def to_datetime(arg: - # Union[List[Any], Tuple[Any, ...]], errors: str = ..., dayfirst: bool = - # ..., yearfirst: bool = ..., utc: Optional[bool] = ..., format: - # Optional[str] = ..., exact: bool = ..., unit: Optional[str] = ..., - # infer_datetime_format: bool = ..., origin: Any = ..., cache: bool = - # ...) -> DatetimeIndex + # error: No overload variant of "to_datetime" matches argument type + # "ndarray" to_datetime(arr).values, # type: ignore[call-overload] dtype, copy=copy, @@ -1284,8 +1249,8 @@ def astype_nansafe( elif is_timedelta64_dtype(dtype): from pandas import to_timedelta - # pandas\core\dtypes\cast.py:1086: error: Incompatible return value - # type (got "ExtensionArray", expected "ndarray") [return-value] + # error: Incompatible return value type (got "ExtensionArray", expected + # "ndarray") return astype_nansafe( # type: ignore[return-value] to_timedelta(arr)._values, dtype, copy=copy ) @@ -1300,12 +1265,11 @@ def astype_nansafe( if copy or is_object_dtype(arr.dtype) or is_object_dtype(dtype): # Explicit copy, or required since NumPy can't view from / to object. 
- # pandas/core/dtypes/cast.py:1178: error: Incompatible return value type (got - # "ndarray", expected "ExtensionArray") [return-value] + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") return arr.astype(dtype, copy=True) # type: ignore[return-value] - # pandas/core/dtypes/cast.py:1180: error: Incompatible return value type (got - # "ndarray", expected "ExtensionArray") [return-value] + # error: Incompatible return value type (got "ndarray", expected "ExtensionArray") return arr.astype(dtype, copy=copy) # type: ignore[return-value] @@ -1611,12 +1575,11 @@ def maybe_cast_to_datetime(value, dtype: Optional[DtypeObj]): # pandas supports dtype whose granularity is less than [ns] # e.g., [ps], [fs], [as] - # pandas/core/dtypes/cast.py:1450: error: Unsupported operand types - # for >= ("dtype[Any]" and "ExtensionDtype") [operator] + # error: Unsupported operand types for >= ("dtype[Any]" and + # "ExtensionDtype") if dtype <= np.dtype("M8[ns]"): # type: ignore[operator] - # pandas/core/dtypes/cast.py:1451: error: Item "None" of - # "Union[dtype[Any], ExtensionDtype, None]" has no attribute - # "name" [union-attr] + # error: Item "None" of "Union[dtype[Any], ExtensionDtype, + # None]" has no attribute "name" if dtype.name == "datetime64": # type: ignore[union-attr] raise ValueError(msg) dtype = DT64NS_DTYPE @@ -1630,8 +1593,7 @@ def maybe_cast_to_datetime(value, dtype: Optional[DtypeObj]): # pandas supports dtype whose granularity is less than [ns] # e.g., [ps], [fs], [as] - # pandas\core\dtypes\cast.py:1468: error: Unsupported operand - # types for >= ("dtype" and "ExtensionDtype") [operator] + # error: Unsupported operand types for >= ("dtype" and "ExtensionDtype") if dtype <= np.dtype("m8[ns]"): # type: ignore[operator] if dtype.name == "timedelta64": raise ValueError(msg) @@ -1648,10 +1610,9 @@ def maybe_cast_to_datetime(value, dtype: Optional[DtypeObj]): # we have an array of datetime or timedeltas & nulls elif np.prod(value.shape) or not is_dtype_equal(value.dtype, dtype): - # pandas/core/dtypes/cast.py:1524: error: Argument 2 to - # "_disallow_mismatched_datetimelike" has incompatible type - # "Union[dtype[Any], ExtensionDtype, None]"; expected - # "Union[dtype[Any], ExtensionDtype]" [arg-type] + # error: Argument 2 to "_disallow_mismatched_datetimelike" has + # incompatible type "Union[dtype[Any], ExtensionDtype, None]"; + # expected "Union[dtype[Any], ExtensionDtype]" _disallow_mismatched_datetimelike( value, dtype # type: ignore[arg-type] ) @@ -1674,14 +1635,10 @@ def maybe_cast_to_datetime(value, dtype: Optional[DtypeObj]): if is_dt_string: # Strings here are naive, so directly localize - # pandas\core\dtypes\cast.py:1504: error: Item - # "dtype" of "Union[dtype, ExtensionDtype]" has - # no attribute "tz" [union-attr] - - # pandas\core\dtypes\cast.py:1504: error: Item - # "ExtensionDtype" of "Union[dtype, + # error: Item "dtype" of "Union[dtype, ExtensionDtype]" + # has no attribute "tz" + # error: Item "ExtensionDtype" of "Union[dtype, # ExtensionDtype]" has no attribute "tz" - # [union-attr] value = value.tz_localize( dtype.tz # type: ignore[union-attr] ) @@ -1689,14 +1646,10 @@ def maybe_cast_to_datetime(value, dtype: Optional[DtypeObj]): # Numeric values are UTC at this point, # so localize and convert - # pandas\core\dtypes\cast.py:1508: error: Item - # "dtype" of "Union[dtype, ExtensionDtype]" has - # no attribute "tz" [union-attr] - - # pandas\core\dtypes\cast.py:1508: error: Item - # "ExtensionDtype" of "Union[dtype, + # error: Item 
"dtype" of "Union[dtype, ExtensionDtype]" + # has no attribute "tz" + # error: Item "ExtensionDtype" of "Union[dtype, # ExtensionDtype]" has no attribute "tz" - # [union-attr] value = value.tz_localize("UTC").tz_convert( dtype.tz # type: ignore[union-attr] ) @@ -1957,11 +1910,10 @@ def construct_1d_ndarray_preserving_na( else: if dtype is not None: _disallow_mismatched_datetimelike(values, dtype) - # pandas/core/dtypes/cast.py:1820: error: Argument "dtype" to "array" has - # incompatible type "Union[dtype[Any], ExtensionDtype, None]"; expected - # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], - # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, - # Any]]]" [arg-type] + # error: Argument "dtype" to "array" has incompatible type "Union[dtype[Any], + # ExtensionDtype, None]"; expected "Union[dtype[Any], None, type, + # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" subarr = np.array(values, dtype=dtype, copy=copy) # type: ignore[arg-type] return subarr @@ -2013,13 +1965,11 @@ def maybe_cast_to_integer_array(arr, dtype: Dtype, copy: bool = False): try: if not hasattr(arr, "astype"): - # pandas\core\dtypes\cast.py:1811: error: Argument "dtype" to - # "array" has incompatible type "Union[ExtensionDtype, str, dtype, - # Type[str], Type[float], Type[int], Type[complex], Type[bool], - # Type[object]]"; expected "Union[dtype, None, type, - # _SupportsDtype, str, Tuple[Any, int], Tuple[Any, Union[int, + # error: Argument "dtype" to "array" has incompatible type + # "Union[ExtensionDtype, str, dtype, Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object]]"; expected "Union[dtype, None, + # type, _SupportsDtype, str, Tuple[Any, int], Tuple[Any, Union[int, # Sequence[int]]], List[Any], _DtypeDict, Tuple[Any, Any]]" - # [arg-type] casted = np.array(arr, dtype=dtype, copy=copy) # type: ignore[arg-type] else: casted = arr.astype(dtype, copy=copy) diff --git a/pandas/core/dtypes/common.py b/pandas/core/dtypes/common.py index 85d49af1cd1a9..437779170e293 100644 --- a/pandas/core/dtypes/common.py +++ b/pandas/core/dtypes/common.py @@ -1707,8 +1707,8 @@ def infer_dtype_from_object(dtype) -> DtypeObj: if isinstance(dtype, type) and issubclass(dtype, np.generic): # Type object from a dtype - # pandas/core/dtypes/common.py:1704: error: Incompatible return value type (got - # "Type[generic]", expected "Union[dtype[Any], ExtensionDtype]") [return-value] + # error: Incompatible return value type (got "Type[generic]", expected + # "Union[dtype[Any], ExtensionDtype]") return dtype # type: ignore[return-value] elif isinstance(dtype, (np.dtype, ExtensionDtype)): # dtype object @@ -1717,9 +1717,8 @@ def infer_dtype_from_object(dtype) -> DtypeObj: except TypeError: # Should still pass if we don't have a date-like pass - # pandas/core/dtypes/common.py:1712: error: Incompatible return value type (got - # "Union[Type[generic], Type[Any]]", expected "Union[dtype[Any], - # ExtensionDtype]") [return-value] + # error: Incompatible return value type (got "Union[Type[generic], Type[Any]]", + # expected "Union[dtype[Any], ExtensionDtype]") return dtype.type # type: ignore[return-value] try: @@ -1734,9 +1733,8 @@ def infer_dtype_from_object(dtype) -> DtypeObj: # TODO(jreback) # should deprecate these if dtype in ["datetimetz", "datetime64tz"]: - # pandas/core/dtypes/common.py:1726: error: Incompatible return value type - # (got "Type[Any]", expected "Union[dtype[Any], 
ExtensionDtype]") - # [return-value] + # error: Incompatible return value type (got "Type[Any]", expected + # "Union[dtype[Any], ExtensionDtype]") return DatetimeTZDtype.type # type: ignore[return-value] elif dtype in ["period"]: raise NotImplementedError diff --git a/pandas/core/dtypes/concat.py b/pandas/core/dtypes/concat.py index 4c142cea98162..3a46d5e313847 100644 --- a/pandas/core/dtypes/concat.py +++ b/pandas/core/dtypes/concat.py @@ -62,11 +62,10 @@ def _cast_to_common_type(arr: ArrayLike, dtype: DtypeObj) -> ArrayLike: if is_extension_array_dtype(dtype) and isinstance(arr, np.ndarray): # numpy's astype cannot handle ExtensionDtypes return array(arr, dtype=dtype, copy=False) - # pandas/core/dtypes/concat.py:101: error: Argument 1 to "astype" of - # "_ArrayOrScalarCommon" has incompatible type "Union[dtype[Any], ExtensionDtype]"; - # expected "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, - # int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, - # Any]]]" [arg-type] + # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has incompatible type + # "Union[dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], None, type, + # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DTypeDict, Tuple[Any, Any]]]" return arr.astype(dtype, copy=False) # type: ignore[arg-type] diff --git a/pandas/core/dtypes/missing.py b/pandas/core/dtypes/missing.py index 65eb7684f339e..7a6d11f1748ff 100644 --- a/pandas/core/dtypes/missing.py +++ b/pandas/core/dtypes/missing.py @@ -562,8 +562,8 @@ def na_value_for_dtype(dtype: DtypeObj, compat: bool = True): """ if is_extension_array_dtype(dtype): - # pandas/core/dtypes/missing.py:565: error: Item "dtype[Any]" of - # "Union[dtype[Any], ExtensionDtype]" has no attribute "na_value" [union-attr] + # error: Item "dtype[Any]" of "Union[dtype[Any], ExtensionDtype]" has no + # attribute "na_value" return dtype.na_value # type: ignore[union-attr] elif needs_i8_conversion(dtype): return dtype.type("NaT", "ns") @@ -649,10 +649,10 @@ def isna_all(arr: ArrayLike) -> bool: ) return all( - # pandas/core/dtypes/missing.py:648: error: Argument 1 to "__call__" of "ufunc" - # has incompatible type "Union[ExtensionArray, Any]"; expected "Union[Union[int, - # float, complex, str, bytes, generic], Sequence[Union[int, float, complex, str, - # bytes, generic]], Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + # error: Argument 1 to "__call__" of "ufunc" has incompatible type + # "Union[ExtensionArray, Any]"; expected "Union[Union[int, float, complex, str, + # bytes, generic], Sequence[Union[int, float, complex, str, bytes, generic]], + # Sequence[Sequence[Any]], _SupportsArray]" checker(arr[i : i + chunk_len]).all() # type: ignore[arg-type] for i in range(0, total_len, chunk_len) ) diff --git a/pandas/core/groupby/generic.py b/pandas/core/groupby/generic.py index de2a0389837d0..bbc7f464b6a00 100644 --- a/pandas/core/groupby/generic.py +++ b/pandas/core/groupby/generic.py @@ -736,15 +736,14 @@ def apply_series_value_counts(): lab = cut(Series(val), bins, include_lowest=True) # error: "ndarray" has no attribute "cat" lev = lab.cat.categories # type: ignore[attr-defined] - # pandas/core/groupby/generic.py:719: error: No overload variant of "take" - # of "_ArrayOrScalarCommon" matches argument types "Any", "bool", - # "Union[Any, float]" [call-overload] + # error: No overload variant of "take" of "_ArrayOrScalarCommon" matches + # argument types "Any", "bool", "Union[Any, float]" 
lab = lev.take( # type: ignore[call-overload] # error: "ndarray" has no attribute "cat" lab.cat.codes, # type: ignore[attr-defined] allow_fill=True, - # pandas/core/groupby/generic.py:722: error: Item "ndarray" of - # "Union[ndarray, Index]" has no attribute "_na_value" [union-attr] + # error: Item "ndarray" of "Union[ndarray, Index]" has no attribute + # "_na_value" fill_value=lev._na_value, # type: ignore[union-attr] ) llab = lambda lab, inc: lab[inc]._multiindex.codes[-1] @@ -752,11 +751,8 @@ def apply_series_value_counts(): if is_interval_dtype(lab.dtype): # TODO: should we do this inside II? - # pandas\core\groupby\generic.py:727: error: "ndarray" has no - # attribute "left" [attr-defined] - - # pandas\core\groupby\generic.py:727: error: "ndarray" has no - # attribute "right" [attr-defined] + # error: "ndarray" has no attribute "left" + # error: "ndarray" has no attribute "right" sorter = np.lexsort( (lab.left, lab.right, ids) # type: ignore[attr-defined] ) @@ -1145,8 +1141,8 @@ def py_fallback(bvalues: ArrayLike) -> ArrayLike: # about a single block input returning a single block output # is a lie. See eg GH-39329 - # pandas/core/groupby/generic.py:1140: error: Incompatible return value - # type (got "ndarray", expected "ExtensionArray") [return-value] + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") return mgr.as_array() # type: ignore[return-value] else: result = mgr.blocks[0].values @@ -1169,9 +1165,8 @@ def blk_func(bvalues: ArrayLike) -> ArrayLike: assert how == "ohlc" raise - # pandas/core/groupby/generic.py:1128: error: Incompatible types in - # assignment (expression has type "ExtensionArray", variable has type - # "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type + # "ExtensionArray", variable has type "ndarray") result = py_fallback(bvalues) # type: ignore[assignment] return cast_agg_result(result, bvalues, how) diff --git a/pandas/core/groupby/groupby.py b/pandas/core/groupby/groupby.py index d63898f2dada4..98c8c635aba07 100644 --- a/pandas/core/groupby/groupby.py +++ b/pandas/core/groupby/groupby.py @@ -2246,9 +2246,8 @@ def pre_processor(vals: np.ndarray) -> Tuple[np.ndarray, Optional[Type]]: inference = "datetime64[ns]" # type: ignore[assignment] vals = np.asarray(vals).astype(float) elif is_timedelta64_dtype(vals.dtype): - # pandas\core\groupby\groupby.py:2196: error: Incompatible - # types in assignment (expression has type "str", variable has - # type "Optional[Type[signedinteger[Any]]]") [assignment] + # error: Incompatible types in assignment (expression has type "str", + # variable has type "Optional[Type[signedinteger[Any]]]") inference = "timedelta64[ns]" # type: ignore[assignment] vals = np.asarray(vals).astype(float) diff --git a/pandas/core/groupby/ops.py b/pandas/core/groupby/ops.py index 9d605ddcb9c19..0c3d8550b0f80 100644 --- a/pandas/core/groupby/ops.py +++ b/pandas/core/groupby/ops.py @@ -515,9 +515,8 @@ def _ea_wrap_cython_operation( if how in ["rank"]: # preserve float64 dtype - # pandas/core/groupby/ops.py:513: error: Incompatible return value type - # (got "ndarray", expected "Tuple[ndarray, Optional[List[str]]]") - # [return-value] + # error: Incompatible return value type (got "ndarray", expected + # "Tuple[ndarray, Optional[List[str]]]") return res_values # type: ignore[return-value] res_values = res_values.astype("i8", copy=False) @@ -532,13 +531,12 @@ def _ea_wrap_cython_operation( ) dtype = maybe_cast_result_dtype(orig_values.dtype, how) if 
is_extension_array_dtype(dtype): - # pandas/core/groupby/ops.py:527: error: Item "dtype[Any]" of - # "Union[dtype[Any], ExtensionDtype]" has no attribute - # "construct_array_type" [union-attr] + # error: Item "dtype[Any]" of "Union[dtype[Any], ExtensionDtype]" has no + # attribute "construct_array_type" cls = dtype.construct_array_type() # type: ignore[union-attr] return cls._from_sequence(res_values, dtype=dtype) - # pandas/core/groupby/ops.py:529: error: Incompatible return value type (got - # "ndarray", expected "Tuple[ndarray, Optional[List[str]]]") [return-value] + # error: Incompatible return value type (got "ndarray", expected + # "Tuple[ndarray, Optional[List[str]]]") return res_values # type: ignore[return-value] elif is_float_dtype(values.dtype): @@ -576,8 +574,8 @@ def _cython_operation( self._disallow_invalid_ops(values, how) if is_extension_array_dtype(values.dtype): - # pandas/core/groupby/ops.py:564: error: Incompatible return value type (got - # "Tuple[ndarray, Optional[List[str]]]", expected "ndarray") [return-value] + # error: Incompatible return value type (got "Tuple[ndarray, + # Optional[List[str]]]", expected "ndarray") return self._ea_wrap_cython_operation( # type: ignore[return-value] kind, values, how, axis, min_count, **kwargs ) @@ -666,9 +664,8 @@ def _cython_operation( # e.g. if we are int64 and need to restore to datetime64/timedelta64 # "rank" is the only member of cython_cast_blocklist we get here dtype = maybe_cast_result_dtype(orig_values.dtype, how) - # pandas/core/groupby/ops.py:652: error: Argument 2 to - # "maybe_downcast_to_dtype" has incompatible type "Union[dtype[Any], - # ExtensionDtype]"; expected "Union[str, dtype[Any]]" [arg-type] + # error: Argument 2 to "maybe_downcast_to_dtype" has incompatible type + # "Union[dtype[Any], ExtensionDtype]"; expected "Union[str, dtype[Any]]" result = maybe_downcast_to_dtype(result, dtype) # type: ignore[arg-type] return result diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index 28c9db14bddb6..40290e6aaab90 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -139,8 +139,7 @@ str_t = str -# pandas\core\indexes\base.py:121: error: Value of type variable "_DTypeScalar" -# of "dtype" cannot be "object" [type-var] +# error: Value of type variable "_DTypeScalar" of "dtype" cannot be "object" _o_dtype = np.dtype(object) # type: ignore[type-var] @@ -354,9 +353,8 @@ def __new__( # should not be coerced # GH 11836 - # pandas/core/indexes/base.py:341: error: Argument 1 to - # "_maybe_cast_with_dtype" has incompatible type "Union[ndarray, Index, - # Series]"; expected "ndarray" [arg-type] + # error: Argument 1 to "_maybe_cast_with_dtype" has incompatible type + # "Union[ndarray, Index, Series]"; expected "ndarray" data = _maybe_cast_with_dtype( data, dtype, copy # type: ignore[arg-type] ) @@ -366,9 +364,8 @@ def __new__( # maybe coerce to a sub-class arr = data else: - # pandas/core/indexes/base.py:354: error: Argument "dtype" to - # "asarray_tuplesafe" has incompatible type "Type[object]"; expected - # "Union[str, dtype[Any], None]" [arg-type] + # error: Argument "dtype" to "asarray_tuplesafe" has incompatible type + # "Type[object]"; expected "Union[str, dtype[Any], None]" arr = com.asarray_tuplesafe( data, dtype=object # type: ignore[arg-type] ) @@ -406,9 +403,8 @@ def __new__( ) # other iterable of some kind - # pandas/core/indexes/base.py:388: error: Argument "dtype" to - # "asarray_tuplesafe" has incompatible type "Type[object]"; expected - # "Union[str, dtype[Any], 
None]" [arg-type] + # error: Argument "dtype" to "asarray_tuplesafe" has incompatible type + # "Type[object]"; expected "Union[str, dtype[Any], None]" subarr = com.asarray_tuplesafe(data, dtype=object) # type: ignore[arg-type] return Index(subarr, dtype=dtype, copy=copy, name=name, **kwargs) @@ -2835,16 +2831,15 @@ def union(self, other, sort=None): # | -> T # | -> object if not (is_integer_dtype(self.dtype) and is_integer_dtype(other.dtype)): - # pandas/core/indexes/base.py:2809: error: Incompatible types in - # assignment (expression has type "str", variable has type - # "Union[dtype[Any], ExtensionDtype]") [assignment] + # error: Incompatible types in assignment (expression has type + # "str", variable has type "Union[dtype[Any], ExtensionDtype]") dtype = "float64" # type: ignore[assignment] else: # one is int64 other is uint64 - # pandas/core/indexes/base.py:2812: error: Incompatible types in - # assignment (expression has type "Type[object]", variable has type - # "Union[dtype[Any], ExtensionDtype]") [assignment] + # error: Incompatible types in assignment (expression has type + # "Type[object]", variable has type "Union[dtype[Any], + # ExtensionDtype]") dtype = object # type: ignore[assignment] left = self.astype(dtype, copy=False) @@ -3905,16 +3900,14 @@ def join(self, other, how="left", level=None, return_indexers=False, sort=False) if return_indexers: if join_index is self: - # pandas\core\indexes\base.py:3564: error: Incompatible types - # in assignment (expression has type "None", variable has type - # "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type "None", + # variable has type "ndarray") lindexer = None # type: ignore[assignment] else: lindexer = self.get_indexer(join_index) if join_index is other: - # pandas\core\indexes\base.py:3568: error: Incompatible types - # in assignment (expression has type "None", variable has type - # "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type "None", + # variable has type "ndarray") rindexer = None # type: ignore[assignment] else: rindexer = other.get_indexer(join_index) @@ -4022,8 +4015,8 @@ def _join_non_unique(self, other, how="left", return_indexers=False): mask = left_idx == -1 np.putmask(join_index, mask, rvalues.take(right_idx)) - # pandas/core/indexes/base.py:3845: error: Incompatible types in assignment - # (expression has type "Index", variable has type "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type "Index", variable + # has type "ndarray") join_index = self._wrap_joined_index( join_index, other # type: ignore[assignment] ) @@ -4204,17 +4197,15 @@ def _join_monotonic(self, other, how="left", return_indexers=False): ridx = None elif how == "inner": join_index, lidx, ridx = self._inner_indexer(sv, ov) - # pandas/core/indexes/base.py:4023: error: Argument 1 to - # "_wrap_joined_index" of "Index" has incompatible type "Index"; - # expected "ndarray" [arg-type] + # error: Argument 1 to "_wrap_joined_index" of "Index" has incompatible + # type "Index"; expected "ndarray" join_index = self._wrap_joined_index( join_index, other # type: ignore[arg-type] ) elif how == "outer": join_index, lidx, ridx = self._outer_indexer(sv, ov) - # pandas/core/indexes/base.py:4026: error: Argument 1 to - # "_wrap_joined_index" of "Index" has incompatible type "Index"; - # expected "ndarray" [arg-type] + # error: Argument 1 to "_wrap_joined_index" of "Index" has incompatible + # type "Index"; expected "ndarray" join_index = 
self._wrap_joined_index( join_index, other # type: ignore[arg-type] ) @@ -4227,9 +4218,8 @@ def _join_monotonic(self, other, how="left", return_indexers=False): join_index, lidx, ridx = self._inner_indexer(sv, ov) elif how == "outer": join_index, lidx, ridx = self._outer_indexer(sv, ov) - # pandas/core/indexes/base.py:4036: error: Argument 1 to - # "_wrap_joined_index" of "Index" has incompatible type "Index"; expected - # "ndarray" [arg-type] + # error: Argument 1 to "_wrap_joined_index" of "Index" has incompatible type + # "Index"; expected "ndarray" join_index = self._wrap_joined_index( join_index, other # type: ignore[arg-type] ) @@ -4275,11 +4265,10 @@ def values(self) -> ArrayLike: Index.array : Reference to the underlying data. Index.to_numpy : A NumPy array representing the underlying data. """ - # pandas/core/indexes/base.py:4244: error: Incompatible return value type (got - # "Union[ExtensionArray, ndarray]", expected "ExtensionArray") [return-value] - - # pandas/core/indexes/base.py:4244: error: Incompatible return value type (got - # "Union[ExtensionArray, ndarray]", expected "ndarray") [return-value] + # error: Incompatible return value type (got "Union[ExtensionArray, ndarray]", + # expected "ExtensionArray") + # error: Incompatible return value type (got "Union[ExtensionArray, ndarray]", + # expected "ndarray") return self._data # type: ignore[return-value] @cache_readonly @@ -4516,11 +4505,10 @@ def __getitem__(self, key): result = getitem(key) if not is_scalar(result): - # pandas\core\indexes\base.py:4283: error: Argument 1 to "ndim" has - # incompatible type "Union[ExtensionArray, Any]"; expected - # "Union[Union[int, float, complex, str, bytes, generic], + # error: Argument 1 to "ndim" has incompatible type "Union[ExtensionArray, + # Any]"; expected "Union[Union[int, float, complex, str, bytes, generic], # Sequence[Union[int, float, complex, str, bytes, generic]], - # Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + # Sequence[Sequence[Any]], _SupportsArray]" if np.ndim(result) > 1: # type: ignore[arg-type] deprecate_ndim_indexing(result) return result @@ -4592,8 +4580,8 @@ def putmask(self, mask, value): numpy.ndarray.putmask : Changes elements of an array based on conditional and input values. 
""" - # pandas/core/indexes/base.py:4595: error: Value of type variable "ArrayLike" of - # "validate_putmask" cannot be "Union[ExtensionArray, ndarray]" [type-var] + # error: Value of type variable "ArrayLike" of "validate_putmask" cannot be + # "Union[ExtensionArray, ndarray]" mask, noop = validate_putmask(self._values, mask) # type: ignore[type-var] if noop: return self.copy() @@ -4610,9 +4598,8 @@ def putmask(self, mask, value): return self.astype(dtype).putmask(mask, value) values = self._values.copy() - # pandas/core/indexes/base.py:4611: error: Argument 1 to - # "setitem_datetimelike_compat" has incompatible type "Union[ExtensionArray, - # ndarray]"; expected "ndarray" [arg-type] + # error: Argument 1 to "setitem_datetimelike_compat" has incompatible type + # "Union[ExtensionArray, ndarray]"; expected "ndarray" converted = setitem_datetimelike_compat( values, mask.sum(), converted # type: ignore[arg-type] ) @@ -5479,8 +5466,8 @@ def isin(self, values, level=None): """ if level is not None: self._validate_index_level(level) - # pandas/core/indexes/base.py:5253: error: Value of type variable "AnyArrayLike" - # of "isin" cannot be "Union[ExtensionArray, ndarray]" [type-var] + # error: Value of type variable "AnyArrayLike" of "isin" cannot be + # "Union[ExtensionArray, ndarray]" return algos.isin(self._values, values) # type: ignore[type-var] def _get_string_slice(self, key: str_t): @@ -5902,9 +5889,8 @@ def _cmp_method(self, other, op): else: with np.errstate(all="ignore"): - # pandas\core\indexes\base.py:5393: error: Value of type - # variable "ArrayLike" of "comparison_op" cannot be - # "Union[ExtensionArray, ndarray]" [type-var] + # error: Value of type variable "ArrayLike" of "comparison_op" cannot be + # "Union[ExtensionArray, ndarray]" result = ops.comparison_op( self._values, other, op # type: ignore[type-var] ) @@ -5980,10 +5966,10 @@ def any(self, *args, **kwargs): """ # FIXME: docstr inaccurate, args/kwargs not passed self._maybe_disable_logical_methods("any") - # pandas/core/indexes/base.py:5901: error: Argument 1 to "any" has incompatible - # type "ArrayLike"; expected "Union[Union[int, float, complex, str, bytes, - # generic], Sequence[Union[int, float, complex, str, bytes, generic]], - # Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + # error: Argument 1 to "any" has incompatible type "ArrayLike"; expected + # "Union[Union[int, float, complex, str, bytes, generic], Sequence[Union[int, + # float, complex, str, bytes, generic]], Sequence[Sequence[Any]], + # _SupportsArray]" return np.any(self.values) # type: ignore[arg-type] def all(self): @@ -6042,10 +6028,10 @@ def all(self): # FIXME: docstr inaccurate, args/kwargs not passed self._maybe_disable_logical_methods("all") - # pandas/core/indexes/base.py:5959: error: Argument 1 to "all" has incompatible - # type "ArrayLike"; expected "Union[Union[int, float, complex, str, bytes, - # generic], Sequence[Union[int, float, complex, str, bytes, generic]], - # Sequence[Sequence[Any]], _SupportsArray]" [arg-type] + # error: Argument 1 to "all" has incompatible type "ArrayLike"; expected + # "Union[Union[int, float, complex, str, bytes, generic], Sequence[Union[int, + # float, complex, str, bytes, generic]], Sequence[Sequence[Any]], + # _SupportsArray]" return np.all(self.values) # type: ignore[arg-type] @final @@ -6333,9 +6319,8 @@ def _maybe_cast_data_without_dtype(subarr): if inferred == "integer": try: - # pandas/core/indexes/base.py:6246: error: Argument 3 to - # "_try_convert_to_int_array" has incompatible type "None"; 
expected - # "dtype[Any]" [arg-type] + # error: Argument 3 to "_try_convert_to_int_array" has incompatible type + # "None"; expected "dtype[Any]" data = _try_convert_to_int_array( subarr, False, None # type: ignore[arg-type] ) @@ -6372,9 +6357,8 @@ def _maybe_cast_data_without_dtype(subarr): pass elif inferred.startswith("timedelta"): - # pandas/core/indexes/base.py:6280: error: Incompatible types in assignment - # (expression has type "TimedeltaArray", variable has type "ndarray") - # [assignment] + # error: Incompatible types in assignment (expression has type + # "TimedeltaArray", variable has type "ndarray") data = TimedeltaArray._from_sequence( # type: ignore[assignment] subarr, copy=False ) diff --git a/pandas/core/indexes/category.py b/pandas/core/indexes/category.py index 92670d66dd829..aaf403ca2c0a7 100644 --- a/pandas/core/indexes/category.py +++ b/pandas/core/indexes/category.py @@ -168,9 +168,8 @@ def _engine_type(self): # self.codes can have dtype int8, int16, int32 or int64, so we need # to return the corresponding engine type (libindex.Int8Engine, etc.). - # pandas\core\indexes\category.py:181: error: Invalid index type - # "Type[generic]" for "Dict[Type[signedinteger[Any]], Any]"; expected - # type "Type[signedinteger[Any]]" [index] + # error: Invalid index type "Type[generic]" for "Dict[Type[signedinteger[Any]], + # Any]"; expected type "Type[signedinteger[Any]]" return { np.int8: libindex.Int8Engine, np.int16: libindex.Int16Engine, @@ -476,9 +475,8 @@ def _get_indexer( if self.equals(target): return np.arange(len(self), dtype="intp") - # pandas/core/indexes/category.py:513: error: Value of type variable "ArrayLike" - # of "_get_indexer_non_unique" of "CategoricalIndex" cannot be - # "Union[ExtensionArray, ndarray]" [type-var] + # error: Value of type variable "ArrayLike" of "_get_indexer_non_unique" of + # "CategoricalIndex" cannot be "Union[ExtensionArray, ndarray]" return self._get_indexer_non_unique(target._values)[0] # type: ignore[type-var] @Appender(_index_shared_docs["get_indexer_non_unique"] % _index_doc_kwargs) diff --git a/pandas/core/indexes/datetimelike.py b/pandas/core/indexes/datetimelike.py index c04988e7e6a6a..532f1d1221ca2 100644 --- a/pandas/core/indexes/datetimelike.py +++ b/pandas/core/indexes/datetimelike.py @@ -142,8 +142,8 @@ def _is_all_dates(self) -> bool: # Abstract data attributes @property - # pandas/core/indexes/datetimelike.py:134: error: Return type "ndarray" of "values" - # incompatible with return type "ArrayLike" in supertype "Index" [override] + # error: Return type "ndarray" of "values" incompatible with return type "ArrayLike" + # in supertype "Index" def values(self) -> np.ndarray: # type: ignore[override] # Note: PeriodArray overrides this to return an ndarray of objects. 
return self._data._data @@ -792,8 +792,7 @@ def _fast_union(self: _T, other: _T, sort=None) -> _T: left, right = self, other left_start = left[0] loc = right.searchsorted(left_start, side="left") - # pandas/core/indexes/datetimelike.py:798: error: Slice index must be an - # integer or None [misc] + # error: Slice index must be an integer or None right_chunk = right._values[:loc] # type: ignore[misc] dates = concat_compat((left._values, right_chunk)) # With sort being False, we can't infer that result.freq == self.freq @@ -810,8 +809,7 @@ def _fast_union(self: _T, other: _T, sort=None) -> _T: # concatenate if left_end < right_end: loc = right.searchsorted(left_end, side="right") - # pandas/core/indexes/datetimelike.py:813: error: Slice index must be an - # integer or None [misc] + # error: Slice index must be an integer or None right_chunk = right._values[loc:] # type: ignore[misc] dates = concat_compat([left._values, right_chunk]) # The can_fast_union check ensures that the result.freq diff --git a/pandas/core/indexes/datetimes.py b/pandas/core/indexes/datetimes.py index 65db6902f1e99..5d4d2a05613ab 100644 --- a/pandas/core/indexes/datetimes.py +++ b/pandas/core/indexes/datetimes.py @@ -502,9 +502,8 @@ def to_series(self, keep_tz=lib.no_default, index=None, name=None): # preserve the tz & copy values = self.copy(deep=True) else: - # pandas\core\indexes\datetimes.py:531: error: Incompatible types - # in assignment (expression has type "Union[ExtensionArray, - # ndarray]", variable has type "DatetimeIndex") [assignment] + # error: Incompatible types in assignment (expression has type + # "Union[ExtensionArray, ndarray]", variable has type "DatetimeIndex") values = self._values.view("M8[ns]").copy() # type: ignore[assignment] return Series(values, index=index, name=name) diff --git a/pandas/core/indexes/extension.py b/pandas/core/indexes/extension.py index 31de56a6f8549..af8da9a9d91ad 100644 --- a/pandas/core/indexes/extension.py +++ b/pandas/core/indexes/extension.py @@ -304,8 +304,8 @@ def astype(self, dtype, copy=True): @cache_readonly def _isnan(self) -> np.ndarray: - # pandas\core\indexes\extension.py:283: error: Incompatible return - # value type (got "ExtensionArray", expected "ndarray") [return-value] + # error: Incompatible return value type (got "ExtensionArray", expected + # "ndarray") return self._data.isna() # type: ignore[return-value] @doc(Index.equals) diff --git a/pandas/core/indexes/interval.py b/pandas/core/indexes/interval.py index cc128ef421ba7..77dc15a759b09 100644 --- a/pandas/core/indexes/interval.py +++ b/pandas/core/indexes/interval.py @@ -1190,16 +1190,14 @@ def interval_range( else: # delegate to the appropriate range function if isinstance(endpoint, Timestamp): - # pandas\core\indexes\interval.py:1293: error: Incompatible types - # in assignment (expression has type "DatetimeIndex", variable has - # type "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type + # "DatetimeIndex", variable has type "ndarray") breaks = date_range( # type: ignore[assignment] start=start, end=end, periods=periods, freq=freq ) else: - # pandas\core\indexes\interval.py:1295: error: Incompatible types - # in assignment (expression has type "TimedeltaIndex", variable has - # type "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type + # "TimedeltaIndex", variable has type "ndarray") breaks = timedelta_range( # type: ignore[assignment] start=start, end=end, periods=periods, freq=freq ) diff --git 
a/pandas/core/indexes/multi.py b/pandas/core/indexes/multi.py index 2dbd171b5c8ef..4022852a2fe2b 100644 --- a/pandas/core/indexes/multi.py +++ b/pandas/core/indexes/multi.py @@ -533,9 +533,8 @@ def from_tuples( elif isinstance(tuples, list): arrays = list(lib.to_object_array_tuples(tuples).T) else: - # pandas/core/indexes/multi.py:533: error: Incompatible types in assignment - # (expression has type "Iterator[Any]", variable has type - # "List[Sequence[Optional[Hashable]]]") [assignment] + # error: Incompatible types in assignment (expression has type + # "Iterator[Any]", variable has type "List[Sequence[Optional[Hashable]]]") arrays = zip(*tuples) # type: ignore[assignment] return cls.from_arrays(arrays, sortorder=sortorder, names=names) @@ -683,8 +682,8 @@ def _values(self) -> np.ndarray: vals, (ABCDatetimeIndex, ABCTimedeltaIndex) ): vals = vals.astype(object) - # pandas/core/indexes/multi.py:686: error: Incompatible types in assignment - # (expression has type "ndarray", variable has type "Index") [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "Index") vals = np.array(vals, copy=False) # type: ignore[assignment] values.append(vals) @@ -692,8 +691,8 @@ def _values(self) -> np.ndarray: return arr @property - # pandas/core/indexes/multi.py:693: error: Return type "ndarray" of "values" - # incompatible with return type "ArrayLike" in supertype "Index" [override] + # error: Return type "ndarray" of "values" incompatible with return type "ArrayLike" + # in supertype "Index" def values(self) -> np.ndarray: # type: ignore[override] return self._values @@ -2177,9 +2176,8 @@ def drop(self, codes, level=None, errors="raise"): if not isinstance(codes, (np.ndarray, Index)): try: - # pandas/core/indexes/multi.py:2185: error: Argument "dtype" to - # "index_labels_to_array" has incompatible type "Type[object]"; expected - # "Union[str, dtype[Any], None]" [arg-type] + # error: Argument "dtype" to "index_labels_to_array" has incompatible + # type "Type[object]"; expected "Union[str, dtype[Any], None]" codes = com.index_labels_to_array( codes, dtype=object # type: ignore[arg-type] ) @@ -3124,15 +3122,13 @@ def convert_indexer(start, stop, step, indexer=indexer, codes=level_codes): indexer = codes.take(ensure_platform_int(indexer)) result = Series(Index(indexer).isin(r).nonzero()[0]) m = result.map(mapper) - # pandas\core\indexes\multi.py:2998: error: Incompatible types - # in assignment (expression has type "ndarray", variable has - # type "Series") [assignment] + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "Series") m = np.asarray(m) # type: ignore[assignment] else: - # pandas\core\indexes\multi.py:3001: error: Incompatible types - # in assignment (expression has type "ndarray", variable has - # type "Series") [assignment] + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "Series") m = np.zeros(len(codes), dtype=bool) # type: ignore[assignment] m[np.in1d(codes, r, assume_unique=Index(codes).is_unique)] = True diff --git a/pandas/core/indexes/numeric.py b/pandas/core/indexes/numeric.py index c5f45d55f352e..6528905f41d72 100644 --- a/pandas/core/indexes/numeric.py +++ b/pandas/core/indexes/numeric.py @@ -240,8 +240,8 @@ def asi8(self) -> np.ndarray: FutureWarning, stacklevel=2, ) - # pandas/core/indexes/numeric.py:287: error: Incompatible return value type (got - # "Union[ExtensionArray, ndarray]", expected "ndarray") [return-value] + # error: 
Incompatible return value type (got "Union[ExtensionArray, ndarray]", + # expected "ndarray") return self._values.view(self._default_dtype) # type: ignore[return-value] From 2f8a14fc6483d5ce323d260f96d44a8fe87809c5 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Tue, 16 Feb 2021 17:15:49 +0000 Subject: [PATCH 60/86] tidy comments --- pandas/core/indexes/numeric.py | 10 +- pandas/core/indexes/period.py | 4 +- pandas/core/indexes/range.py | 23 ++- pandas/core/internals/array_manager.py | 66 +++---- pandas/core/internals/blocks.py | 168 +++++++----------- pandas/core/internals/concat.py | 44 +++-- pandas/core/internals/construction.py | 64 +++---- pandas/core/internals/managers.py | 67 ++++--- pandas/core/internals/ops.py | 33 ++-- pandas/core/reshape/merge.py | 24 +-- pandas/core/reshape/pivot.py | 7 +- pandas/core/reshape/reshape.py | 5 +- pandas/core/strings/accessor.py | 3 +- pandas/core/strings/object_array.py | 5 +- pandas/core/tools/datetimes.py | 5 +- pandas/core/tools/numeric.py | 4 +- pandas/core/window/ewm.py | 14 +- pandas/core/window/rolling.py | 6 +- pandas/io/formats/format.py | 56 +++--- pandas/io/formats/string.py | 15 +- pandas/io/json/_json.py | 22 ++- pandas/io/parsers/base_parser.py | 34 ++-- pandas/io/pytables.py | 18 +- pandas/io/sql.py | 24 +-- pandas/io/stata.py | 9 +- .../tests/io/parser/common/test_chunksize.py | 3 +- 26 files changed, 300 insertions(+), 433 deletions(-) diff --git a/pandas/core/indexes/numeric.py b/pandas/core/indexes/numeric.py index 6528905f41d72..ec81eae5b858a 100644 --- a/pandas/core/indexes/numeric.py +++ b/pandas/core/indexes/numeric.py @@ -281,10 +281,9 @@ def _convert_arr_indexer(self, keyarr): ): dtype = np.uint64 - # pandas/core/indexes/numeric.py:321: error: Argument "dtype" to - # "asarray_tuplesafe" has incompatible type + # error: Argument "dtype" to "asarray_tuplesafe" has incompatible type # "Optional[Type[unsignedinteger[Any]]]"; expected "Union[str, dtype[Any], - # None]" [arg-type] + # None]" return com.asarray_tuplesafe(keyarr, dtype=dtype) # type: ignore[arg-type] @@ -322,9 +321,8 @@ def astype(self, dtype, copy=True): # TODO(jreback); this can change once we have an EA Index type # GH 13149 - # pandas/core/indexes/numeric.py:357: error: Argument 1 to "astype_nansafe" - # has incompatible type "Union[ExtensionArray, ndarray]"; expected "ndarray" - # [arg-type] + # error: Argument 1 to "astype_nansafe" has incompatible type + # "Union[ExtensionArray, ndarray]"; expected "ndarray" arr = astype_nansafe(self._values, dtype=dtype) # type: ignore[arg-type] return Int64Index(arr, name=self.name) return super().astype(dtype, copy=copy) diff --git a/pandas/core/indexes/period.py b/pandas/core/indexes/period.py index 4cd9f790a2085..ce15bd397b25d 100644 --- a/pandas/core/indexes/period.py +++ b/pandas/core/indexes/period.py @@ -252,8 +252,8 @@ def __new__( # Data @property - # pandas/core/indexes/period.py:251: error: Return type "ndarray" of "values" - # incompatible with return type "ArrayLike" in supertype "Index" [override] + # error: Return type "ndarray" of "values" incompatible with return type "ArrayLike" + # in supertype "Index" def values(self) -> np.ndarray: # type: ignore[override] return np.asarray(self, dtype=object) diff --git a/pandas/core/indexes/range.py b/pandas/core/indexes/range.py index fd65869549d71..fd4a0b635ebcf 100644 --- a/pandas/core/indexes/range.py +++ b/pandas/core/indexes/range.py @@ -97,11 +97,11 @@ def __new__( name=None, ): - # pandas/core/indexes/range.py:95: error: Argument 1 to "_validate_dtype" of 
- # "NumericIndex" has incompatible type "Union[ExtensionDtype, str, dtype[Any], - # Type[str], Type[float], Type[int], Type[complex], Type[bool], Type[object], - # None]"; expected "Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], - # Type[float], Type[int], Type[complex], Type[bool], Type[object]]" [arg-type] + # error: Argument 1 to "_validate_dtype" of "NumericIndex" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]"; expected + # "Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object]]" cls._validate_dtype(dtype) # type: ignore[arg-type] name = maybe_extract_name(name, start, cls) @@ -145,11 +145,11 @@ def from_range( f"range, {repr(data)} was passed" ) - # pandas/core/indexes/range.py:138: error: Argument 1 to "_validate_dtype" of - # "NumericIndex" has incompatible type "Union[ExtensionDtype, str, dtype[Any], - # Type[str], Type[float], Type[int], Type[complex], Type[bool], Type[object], - # None]"; expected "Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], - # Type[float], Type[int], Type[complex], Type[bool], Type[object]]" [arg-type] + # error: Argument 1 to "_validate_dtype" of "NumericIndex" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]"; expected + # "Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object]]" cls._validate_dtype(dtype) # type: ignore[arg-type] return cls._simple_new(data, name=name) @@ -889,8 +889,7 @@ def _arith_method(self, other, op): # apply if we have an override if step: with np.errstate(all="ignore"): - # pandas\core\indexes\range.py:867: error: "bool" not - # callable [operator] + # error: "bool" not callable rstep = step(left.step, right) # type: ignore[operator] # we don't have a representable op diff --git a/pandas/core/internals/array_manager.py b/pandas/core/internals/array_manager.py index 185bb25cc1037..4f4e9ca85057e 100644 --- a/pandas/core/internals/array_manager.py +++ b/pandas/core/internals/array_manager.py @@ -183,15 +183,8 @@ def reduce( res_arrays.append(np.array([res])) index = Index([None]) # placeholder - # pandas/core/internals/array_manager.py:186: error: Argument 1 to - # "ArrayManager" has incompatible type "List[ndarray]"; expected - # "List[Union[ndarray, ExtensionArray]]" [arg-type] - - # pandas/core/internals/array_manager.py:186: note: "List" is invariant -- see - # http://mypy.readthedocs.io/en/latest/common_issues.html#variance - - # pandas/core/internals/array_manager.py:186: note: Consider using "Sequence" - # instead, which is covariant + # error: Argument 1 to "ArrayManager" has incompatible type "List[ndarray]"; + # expected "List[Union[ndarray, ExtensionArray]]" new_mgr = type(self)(res_arrays, [index, self.items]) # type: ignore[arg-type] indexer = np.arange(self.shape[0]) return new_mgr, indexer @@ -284,15 +277,8 @@ def apply( if len(result_arrays) == 0: return self.make_empty(new_axes) - # pandas/core/internals/array_manager.py:278: error: Argument 1 to - # "ArrayManager" has incompatible type "List[ndarray]"; expected - # "List[Union[ndarray, ExtensionArray]]" [arg-type] - - # pandas/core/internals/array_manager.py:278: note: "List" is invariant -- see - # http://mypy.readthedocs.io/en/latest/common_issues.html#variance - - # 
pandas/core/internals/array_manager.py:278: note: Consider using "Sequence" - # instead, which is covariant + # error: Argument 1 to "ArrayManager" has incompatible type "List[ndarray]"; + # expected "List[Union[ndarray, ExtensionArray]]" return type(self)(result_arrays, new_axes) # type: ignore[arg-type] def apply_with_block(self: T, f, align_keys=None, **kwargs) -> T: @@ -576,9 +562,8 @@ def as_array( result = np.empty(self.shape_proper, dtype=dtype) - # pandas/core/internals/array_manager.py:580: error: Incompatible types in - # assignment (expression has type "Union[ndarray, ExtensionArray]", variable has - # type "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type "Union[ndarray, + # ExtensionArray]", variable has type "ndarray") for i, arr in enumerate(self.arrays): # type: ignore[assignment] arr = arr.astype(dtype, copy=copy) result[:, i] = arr @@ -630,8 +615,8 @@ def fast_xs(self, loc: int) -> ArrayLike: result = np.array([arr[loc] for arr in self.arrays], dtype=temp_dtype) if isinstance(dtype, ExtensionDtype): result = dtype.construct_array_type()._from_sequence(result, dtype=dtype) - # pandas/core/internals/array_manager.py:631: error: Incompatible return value - # type (got "ndarray", expected "ExtensionArray") [return-value] + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") return result # type: ignore[return-value] def iget(self, i: int) -> SingleBlockManager: @@ -649,13 +634,10 @@ def iget_values(self, i: int) -> ArrayLike: """ Return the data for column i as the values (ndarray or ExtensionArray). """ - # pandas/core/internals/array_manager.py:648: error: Incompatible return value - # type (got "Union[ndarray, ExtensionArray]", expected "ExtensionArray") - # [return-value] - - # pandas/core/internals/array_manager.py:648: error: Incompatible return value - # type (got "Union[ndarray, ExtensionArray]", expected "ndarray") - # [return-value] + # error: Incompatible return value type (got "Union[ndarray, ExtensionArray]", + # expected "ExtensionArray") + # error: Incompatible return value type (got "Union[ndarray, ExtensionArray]", + # expected "ndarray") return self.arrays[i] # type: ignore[return-value] def idelete(self, indexer): @@ -695,9 +677,8 @@ def iset(self, loc: Union[int, slice, np.ndarray], value): assert isinstance(value, (np.ndarray, ExtensionArray)) assert value.ndim == 1 assert len(value) == len(self._axes[0]) - # pandas/core/internals/array_manager.py:675: error: Invalid index type - # "Union[int, slice, ndarray]" for "List[Union[ndarray, ExtensionArray]]"; - # expected type "int" [index] + # error: Invalid index type "Union[int, slice, ndarray]" for + # "List[Union[ndarray, ExtensionArray]]"; expected type "int" self.arrays[loc] = value # type: ignore[index] return @@ -711,9 +692,8 @@ def iset(self, loc: Union[int, slice, np.ndarray], value): else: assert isinstance(loc, np.ndarray) assert loc.dtype == "bool" - # pandas/core/internals/array_manager.py:714: error: Incompatible types in - # assignment (expression has type "ndarray", variable has type "range") - # [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "range") indices = np.nonzero(loc)[0] # type: ignore[assignment] assert value.ndim == 2 @@ -871,11 +851,10 @@ def _make_na_array(self, fill_value=None): fill_value = np.nan dtype, fill_value = infer_dtype_from_scalar(fill_value) - # pandas/core/internals/array_manager.py:828: error: Argument "dtype" to "empty" - # has 
incompatible type "Union[dtype[Any], ExtensionDtype]"; expected - # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], - # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, - # Any]]]" [arg-type] + # error: Argument "dtype" to "empty" has incompatible type "Union[dtype[Any], + # ExtensionDtype]"; expected "Union[dtype[Any], None, type, _SupportsDType, str, + # Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], List[Any], + # _DTypeDict, Tuple[Any, Any]]]" values = np.empty(self.shape_proper[0], dtype=dtype) # type: ignore[arg-type] values.fill(fill_value) return values @@ -886,9 +865,8 @@ def _equal_values(self, other) -> bool: assuming shape and indexes have already been checked. """ for left, right in zip(self.arrays, other.arrays): - # pandas/core/internals/array_manager.py:876: error: Value of type variable - # "ArrayLike" of "array_equals" cannot be "Union[Any, ndarray, - # ExtensionArray]" [type-var] + # error: Value of type variable "ArrayLike" of "array_equals" cannot be + # "Union[Any, ndarray, ExtensionArray]" if not array_equals(left, right): # type: ignore[type-var] return False else: diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index adcdeb614ec33..bd56dfb1a1180 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -239,9 +239,8 @@ def array_values(self) -> ExtensionArray: """ The array that Series.array returns. Always an ExtensionArray. """ - # pandas\core\internals\blocks.py:232: error: Argument 1 to - # "PandasArray" has incompatible type "Union[ndarray, ExtensionArray]"; - # expected "Union[ndarray, PandasArray]" [arg-type] + # error: Argument 1 to "PandasArray" has incompatible type "Union[ndarray, + # ExtensionArray]"; expected "Union[ndarray, PandasArray]" return PandasArray(self.values) # type: ignore[arg-type] def get_values(self, dtype: Optional[DtypeObj] = None) -> np.ndarray: @@ -251,8 +250,8 @@ def get_values(self, dtype: Optional[DtypeObj] = None) -> np.ndarray: """ if is_object_dtype(dtype): return self.values.astype(object) - # pandas/core/internals/blocks.py:252: error: Incompatible return value type - # (got "Union[ndarray, ExtensionArray]", expected "ndarray") [return-value] + # error: Incompatible return value type (got "Union[ndarray, ExtensionArray]", + # expected "ndarray") return self.values # type: ignore[return-value] def get_block_values_for_json(self) -> np.ndarray: @@ -431,8 +430,8 @@ def fillna( inplace = validate_bool_kwarg(inplace, "inplace") mask = isna(self.values) - # pandas/core/internals/blocks.py:435: error: Value of type variable "ArrayLike" - # of "validate_putmask" cannot be "Union[ndarray, ExtensionArray]" [type-var] + # error: Value of type variable "ArrayLike" of "validate_putmask" cannot be + # "Union[ndarray, ExtensionArray]" mask, noop = validate_putmask(self.values, mask) # type: ignore[type-var] if limit is not None: @@ -636,9 +635,8 @@ def astype(self, dtype, copy: bool = False, errors: str = "raise"): # e.g. 
astype_nansafe can fail on object-dtype of strings # trying to convert to float if errors == "ignore": - # pandas/core/internals/blocks.py:635: error: Incompatible types in - # assignment (expression has type "Union[ndarray, ExtensionArray]", - # variable has type "ExtensionArray") [assignment] + # error: Incompatible types in assignment (expression has type + # "Union[ndarray, ExtensionArray]", variable has type "ExtensionArray") new_values = self.values # type: ignore[assignment] else: raise @@ -657,32 +655,25 @@ def _astype(self, dtype: DtypeObj, copy: bool) -> ArrayLike: values = self.values if is_datetime64tz_dtype(dtype) and is_datetime64_dtype(values.dtype): - # pandas/core/internals/blocks.py:653: error: Value of type variable - # "ArrayLike" of "astype_dt64_to_dt64tz" cannot be "Union[ndarray, - # ExtensionArray]" [type-var] - - # pandas/core/internals/blocks.py:653: error: Incompatible return value type - # (got "DatetimeArray", expected "ndarray") [return-value] + # error: Value of type variable "ArrayLike" of "astype_dt64_to_dt64tz" + # cannot be "Union[ndarray, ExtensionArray]" + # error: Incompatible return value type (got "DatetimeArray", expected + # "ndarray") return astype_dt64_to_dt64tz( # type: ignore[type-var,return-value] values, dtype, copy, via_utc=True ) if is_dtype_equal(values.dtype, dtype): if copy: - # pandas/core/internals/blocks.py:657: error: Incompatible return value - # type (got "Union[ndarray, ExtensionArray]", expected "ExtensionArray") - # [return-value] - - # pandas/core/internals/blocks.py:657: error: Incompatible return value - # type (got "Union[ndarray, ExtensionArray]", expected "ndarray") - # [return-value] + # error: Incompatible return value type (got "Union[ndarray, + # ExtensionArray]", expected "ExtensionArray") + # error: Incompatible return value type (got "Union[ndarray, + # ExtensionArray]", expected "ndarray") return values.copy() # type: ignore[return-value] - # pandas/core/internals/blocks.py:658: error: Incompatible return value type - # (got "Union[ndarray, ExtensionArray]", expected "ExtensionArray") - # [return-value] - - # pandas/core/internals/blocks.py:658: error: Incompatible return value type - # (got "Union[ndarray, ExtensionArray]", expected "ndarray") [return-value] + # error: Incompatible return value type (got "Union[ndarray, + # ExtensionArray]", expected "ExtensionArray") + # error: Incompatible return value type (got "Union[ndarray, + # ExtensionArray]", expected "ndarray") return values # type: ignore[return-value] if isinstance(values, ExtensionArray): @@ -691,12 +682,10 @@ def _astype(self, dtype: DtypeObj, copy: bool) -> ArrayLike: else: values = astype_nansafe(values, dtype, copy=copy) - # pandas/core/internals/blocks.py:666: error: Incompatible return value type - # (got "Union[ndarray, ExtensionArray]", expected "ExtensionArray") - # [return-value] - - # pandas/core/internals/blocks.py:666: error: Incompatible return value type - # (got "Union[ndarray, ExtensionArray]", expected "ndarray") [return-value] + # error: Incompatible return value type (got "Union[ndarray, ExtensionArray]", + # expected "ExtensionArray") + # error: Incompatible return value type (got "Union[ndarray, ExtensionArray]", + # expected "ndarray") return values # type: ignore[return-value] def convert( @@ -784,8 +773,8 @@ def replace( values = self.values - # pandas/core/internals/blocks.py:791: error: Value of type variable "ArrayLike" - # of "mask_missing" cannot be "Union[ndarray, ExtensionArray]" [type-var] + # error: Value of type 
variable "ArrayLike" of "mask_missing" cannot be + # "Union[ndarray, ExtensionArray]" mask = missing.mask_missing(values, to_replace) # type: ignore[type-var] if not mask.any(): # Note: we get here with test_replace_extension_other incorrectly @@ -842,9 +831,8 @@ def _replace_regex( rx = re.compile(to_replace) new_values = self.values if inplace else self.values.copy() - # pandas\core\internals\blocks.py:855: error: Value of type variable - # "ArrayLike" of "replace_regex" cannot be "Union[ndarray, - # ExtensionArray]" [type-var] + # error: Value of type variable "ArrayLike" of "replace_regex" cannot be + # "Union[ndarray, ExtensionArray]" replace_regex(new_values, rx, value, mask) # type: ignore[type-var] block = self.make_block(new_values) @@ -885,9 +873,8 @@ def _replace_list( # in order to avoid repeating the same computations mask = ~isna(self.values) masks = [ - # pandas/core/internals/blocks.py:885: error: Value of type variable - # "ArrayLike" of "compare_or_regex_search" cannot be "Union[ndarray, - # ExtensionArray]" [type-var] + # error: Value of type variable "ArrayLike" of "compare_or_regex_search" + # cannot be "Union[ndarray, ExtensionArray]" compare_or_regex_search( # type: ignore[type-var] self.values, s[0], regex=regex, mask=mask ) @@ -896,17 +883,15 @@ def _replace_list( else: # GH#38086 faster if we know we dont need to check for regex - # pandas/core/internals/blocks.py:906: error: Value of type variable - # "ArrayLike" of "mask_missing" cannot be "Union[ndarray, ExtensionArray]" - # [type-var] + # error: Value of type variable "ArrayLike" of "mask_missing" cannot be + # "Union[ndarray, ExtensionArray]" masks = [ missing.mask_missing(self.values, s[0]) # type: ignore[type-var] for s in pairs ] - # pandas/core/internals/blocks.py:899: error: Value of type variable "ArrayLike" - # of "extract_bool_array" cannot be "Union[ndarray, ExtensionArray, bool]" - # [type-var] + # error: Value of type variable "ArrayLike" of "extract_bool_array" cannot be + # "Union[ndarray, ExtensionArray, bool]" masks = [extract_bool_array(x) for x in masks] # type: ignore[type-var] rb = [self if inplace else self.copy()] @@ -964,9 +949,8 @@ def _replace_coerce( nb = self.coerce_to_target_dtype(value) if nb is self and not inplace: nb = nb.copy() - # pandas/core/internals/blocks.py:967: error: Value of type variable - # "ArrayLike" of "putmask_inplace" cannot be "Union[ndarray, - # ExtensionArray]" [type-var] + # error: Value of type variable "ArrayLike" of "putmask_inplace" cannot + # be "Union[ndarray, ExtensionArray]" putmask_inplace(nb.values, mask, value) # type: ignore[type-var] return [nb] else: @@ -1040,9 +1024,8 @@ def setitem(self, indexer, value): # length checking check_setitem_lengths(indexer, value, values) - # pandas/core/internals/blocks.py:1000: error: Value of type variable - # "ArrayLike" of "is_exact_shape_match" cannot be "Union[Any, ndarray, - # ExtensionArray]" [type-var] + # error: Value of type variable "ArrayLike" of "is_exact_shape_match" cannot be + # "Union[Any, ndarray, ExtensionArray]" exact_match = is_exact_shape_match(values, arr_value) # type: ignore[type-var] if is_empty_indexer(indexer, arr_value): # GH#8669 empty indexers @@ -1062,9 +1045,8 @@ def setitem(self, indexer, value): if values.shape[-1] != 1: # shouldn't get here (at least until 2D EAs) raise NotImplementedError - # pandas/core/internals/blocks.py:1019: error: Invalid index type - # "Tuple[slice, int]" for "Union[ndarray, ExtensionArray]"; expected - # type "Union[int, slice, ndarray]" [index] + # 
error: Invalid index type "Tuple[slice, int]" for "Union[ndarray, + # ExtensionArray]"; expected type "Union[int, slice, ndarray]" values = values[:, 0] # type: ignore[index] return self.make_block(Categorical(values, dtype=arr_value.dtype)) @@ -1090,9 +1072,8 @@ def setitem(self, indexer, value): values[indexer] = value.to_numpy(values.dtype).reshape(-1, 1) else: - # pandas/core/internals/blocks.py:1028: error: Argument 1 to - # "setitem_datetimelike_compat" has incompatible type "Union[ndarray, - # ExtensionArray]"; expected "ndarray" [arg-type] + # error: Argument 1 to "setitem_datetimelike_compat" has incompatible type + # "Union[ndarray, ExtensionArray]"; expected "ndarray" value = setitem_datetimelike_compat( values, len(values[indexer]), value # type: ignore[arg-type] ) @@ -1120,9 +1101,8 @@ def putmask(self, mask, new) -> List[Block]: List[Block] """ transpose = self.ndim == 2 - # pandas/core/internals/blocks.py:1053: error: Value of type variable - # "ArrayLike" of "validate_putmask" cannot be "Union[ndarray, ExtensionArray]" - # [type-var] + # error: Value of type variable "ArrayLike" of "validate_putmask" cannot be + # "Union[ndarray, ExtensionArray]" mask, noop = validate_putmask(self.values.T, mask) # type: ignore[type-var] assert not isinstance(new, (ABCIndex, ABCSeries, ABCDataFrame)) @@ -1136,9 +1116,8 @@ def putmask(self, mask, new) -> List[Block]: if transpose: new_values = new_values.T - # pandas/core/internals/blocks.py:1090: error: Argument 1 to - # "putmask_without_repeat" has incompatible type "Union[ndarray, - # ExtensionArray]"; expected "ndarray" [arg-type] + # error: Argument 1 to "putmask_without_repeat" has incompatible type + # "Union[ndarray, ExtensionArray]"; expected "ndarray" putmask_without_repeat(new_values, mask, new) # type: ignore[arg-type] return [self] @@ -1384,9 +1363,8 @@ def shift(self, periods: int, axis: int = 0, fill_value: Any = None) -> List[Blo # convert integer to float if necessary. 
need to do a lot more than # that, handle boolean etc also - # pandas/core/internals/blocks.py:1286: error: Argument 1 to "maybe_upcast" has - # incompatible type "Union[ndarray, ExtensionArray]"; expected "ndarray" - # [arg-type] + # error: Argument 1 to "maybe_upcast" has incompatible type "Union[ndarray, + # ExtensionArray]"; expected "ndarray" new_values, fill_value = maybe_upcast( self.values, fill_value # type: ignore[arg-type] ) @@ -1424,9 +1402,8 @@ def where(self, other, cond, errors="raise", axis: int = 0) -> List[Block]: if transpose: values = values.T - # pandas/core/internals/blocks.py:1354: error: Value of type variable - # "ArrayLike" of "validate_putmask" cannot be "Union[ndarray, ExtensionArray]" - # [type-var] + # error: Value of type variable "ArrayLike" of "validate_putmask" cannot be + # "Union[ndarray, ExtensionArray]" icond, noop = validate_putmask(values, ~cond) # type: ignore[type-var] if is_valid_na_for_dtype(other, self.dtype) and not self.is_object: @@ -1445,13 +1422,10 @@ def where(self, other, cond, errors="raise", axis: int = 0) -> List[Block]: blocks = block.where(orig_other, cond, errors=errors, axis=axis) return self._maybe_downcast(blocks, "infer") - # pandas/core/internals/blocks.py:1372: error: Argument 1 to - # "setitem_datetimelike_compat" has incompatible type "Union[ndarray, - # ExtensionArray]"; expected "ndarray" [arg-type] - - # pandas/core/internals/blocks.py:1372: error: Argument 2 to - # "setitem_datetimelike_compat" has incompatible type "number[Any]"; - # expected "int" [arg-type] + # error: Argument 1 to "setitem_datetimelike_compat" has incompatible type + # "Union[ndarray, ExtensionArray]"; expected "ndarray" + # error: Argument 2 to "setitem_datetimelike_compat" has incompatible type + # "number[Any]"; expected "int" alt = setitem_datetimelike_compat( values, icond.sum(), other # type: ignore[arg-type] ) @@ -1545,9 +1519,8 @@ def quantile( values = self.values mask = np.asarray(isna(values)) - # pandas/core/internals/blocks.py:1463: error: Argument 1 to - # "quantile_with_mask" has incompatible type "Union[ndarray, ExtensionArray]"; - # expected "ndarray" [arg-type] + # error: Argument 1 to "quantile_with_mask" has incompatible type + # "Union[ndarray, ExtensionArray]"; expected "ndarray" result = quantile_with_mask( values, mask, fill_value, qs, interpolation, axis # type: ignore[arg-type] ) @@ -1617,9 +1590,8 @@ def iget(self, col): elif isinstance(col, slice): if col != slice(None): raise NotImplementedError(col) - # pandas\core\internals\blocks.py:1740: error: Invalid index - # type "List[Any]" for "ExtensionArray"; expected type - # "Union[int, slice, ndarray]" [index] + # error: Invalid index type "List[Any]" for "ExtensionArray"; expected + # type "Union[int, slice, ndarray]" return self.values[[loc]] # type: ignore[index] return self.values[loc] else: @@ -1744,9 +1716,8 @@ def to_native_types(self, na_rep="nan", quoting=None, **kwargs): values = self.values mask = isna(values) - # pandas\core\internals\blocks.py:1803: error: Incompatible types in - # assignment (expression has type "ndarray", variable has type - # "ExtensionArray") [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") values = np.asarray(values.astype(object)) # type: ignore[assignment] values[mask] = na_rep @@ -1889,9 +1860,8 @@ def where(self, other, cond, errors="raise", axis: int = 0) -> List[Block]: # we want to replace that with the correct NA value # for the type - # 
pandas/core/internals/blocks.py:1886: error: Item "dtype[Any]" of - # "Union[dtype[Any], ExtensionDtype]" has no attribute "na_value" - # [union-attr] + # error: Item "dtype[Any]" of "Union[dtype[Any], ExtensionDtype]" has no + # attribute "na_value" other = self.dtype.na_value # type: ignore[union-attr] if is_sparse(self.values): @@ -1996,9 +1966,8 @@ class NumericBlock(Block): is_numeric = True def _can_hold_element(self, element: Any) -> bool: - # pandas/core/internals/blocks.py:1972: error: Argument 1 to "can_hold_element" - # has incompatible type "Union[dtype[Any], ExtensionDtype]"; expected - # "dtype[Any]" [arg-type] + # error: Argument 1 to "can_hold_element" has incompatible type + # "Union[dtype[Any], ExtensionDtype]"; expected "dtype[Any]" return can_hold_element(self.dtype, element) # type: ignore[arg-type] @property @@ -2092,8 +2061,8 @@ def get_values(self, dtype: Optional[DtypeObj] = None) -> np.ndarray: if is_object_dtype(dtype): # DTA/TDA constructor and astype can handle 2D return self._holder(self.values).astype(object) - # pandas/core/internals/blocks.py:2039: error: Incompatible return value type - # (got "Union[ndarray, ExtensionArray]", expected "ndarray") [return-value] + # error: Incompatible return value type (got "Union[ndarray, ExtensionArray]", + # expected "ndarray") return self.values # type: ignore[return-value] def internal_values(self): @@ -2599,8 +2568,7 @@ def safe_reshape(arr: ArrayLike, new_shape: Shape) -> ArrayLike: # Note: this will include TimedeltaArray and tz-naive DatetimeArray # TODO(EA2D): special case will be unnecessary with 2D EAs - # pandas/core/internals/blocks.py:2615: error: Incompatible types in assignment - # (expression has type "ndarray", variable has type "ExtensionArray") - # [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") arr = np.asarray(arr).reshape(new_shape) # type: ignore[assignment] return arr diff --git a/pandas/core/internals/concat.py b/pandas/core/internals/concat.py index 01e2144d99ca7..6b68c19cab071 100644 --- a/pandas/core/internals/concat.py +++ b/pandas/core/internals/concat.py @@ -141,8 +141,8 @@ def _get_mgr_concatenation_plan(mgr: BlockManager, indexers: Dict[int, np.ndarra blk = mgr.blocks[0] return [(blk.mgr_locs, JoinUnit(blk, mgr_shape, indexers))] - # pandas/core/internals/concat.py:131: error: Incompatible types in assignment - # (expression has type "None", variable has type "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type "None", variable + # has type "ndarray") ax0_indexer = None # type: ignore[assignment] blknos = mgr.blknos blklocs = mgr.blklocs @@ -285,8 +285,8 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike: if self.is_valid_na_for(empty_dtype): blk_dtype = getattr(self.block, "dtype", None) - # pandas/core/internals/concat.py:271: error: Value of type variable - # "_DTypeScalar" of "dtype" cannot be "object" [type-var] + # error: Value of type variable "_DTypeScalar" of "dtype" cannot be + # "object" if blk_dtype == np.dtype(object): # type: ignore[type-var] # we want to avoid filling with np.nan if we are # using None; we already know that we are all @@ -298,9 +298,8 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike: if is_datetime64tz_dtype(empty_dtype): # TODO(EA2D): special case unneeded with 2D EAs i8values = np.full(self.shape[1], fill_value.value) - # pandas/core/internals/concat.py:303: error: 
Incompatible return - # value type (got "DatetimeArray", expected "ndarray") - # [return-value] + # error: Incompatible return value type (got "DatetimeArray", + # expected "ndarray") return DatetimeArray( # type: ignore[return-value] i8values, dtype=empty_dtype ) @@ -309,9 +308,8 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike: elif is_extension_array_dtype(blk_dtype): pass elif is_extension_array_dtype(empty_dtype): - # pandas/core/internals/concat.py:312: error: Item "dtype[Any]" of - # "Union[dtype[Any], ExtensionDtype]" has no attribute - # "construct_array_type" [union-attr] + # error: Item "dtype[Any]" of "Union[dtype[Any], ExtensionDtype]" + # has no attribute "construct_array_type" cls = empty_dtype.construct_array_type() # type: ignore[union-attr] missing_arr = cls._from_sequence([], dtype=empty_dtype) ncols, nrows = self.shape @@ -324,11 +322,11 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike: # NB: we should never get here with empty_dtype integer or bool; # if we did, the missing_arr.fill would cast to gibberish - # pandas/core/internals/concat.py:323: error: Argument "dtype" to - # "empty" has incompatible type "Union[dtype[Any], ExtensionDtype]"; - # expected "Union[dtype[Any], None, type, _SupportsDType, str, - # Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], - # List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + # error: Argument "dtype" to "empty" has incompatible type + # "Union[dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], + # None, type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, + # Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" missing_arr = np.empty( self.shape, dtype=empty_dtype # type: ignore[arg-type] ) @@ -398,9 +396,8 @@ def _concatenate_join_units( # concatting with at least one EA means we are concatting a single column # the non-EA values are 2D arrays with shape (1, n) - # pandas/core/internals/concat.py:389: error: Invalid index type "Tuple[int, - # slice]" for "ExtensionArray"; expected type "Union[int, slice, ndarray]" - # [index] + # error: Invalid index type "Tuple[int, slice]" for "ExtensionArray"; expected + # type "Union[int, slice, ndarray]" to_concat = [ t if isinstance(t, ExtensionArray) else t[0, :] # type: ignore[index] for t in to_concat @@ -412,14 +409,13 @@ def _concatenate_join_units( # special case DatetimeArray/TimedeltaArray, which *is* an EA, but # is put in a consolidated 2D block - # pandas/core/internals/concat.py:396: error: No overload variant of - # "atleast_2d" matches argument type "ExtensionArray" [call-overload] + # error: No overload variant of "atleast_2d" matches argument type + # "ExtensionArray" concat_values = np.atleast_2d(concat_values) # type: ignore[call-overload] else: concat_values = concat_compat(to_concat, axis=concat_axis) - # pandas/core/internals/concat.py:372: error: Incompatible return value type (got - # "ExtensionArray", expected "ndarray") [return-value] + # error: Incompatible return value type (got "ExtensionArray", expected "ndarray") return concat_values # type: ignore[return-value] @@ -428,8 +424,8 @@ def _dtype_to_na_value(dtype: DtypeObj, has_none_blocks: bool): Find the NA value to go with this dtype. 
""" if is_extension_array_dtype(dtype): - # pandas/core/internals/concat.py:419: error: Item "dtype[Any]" of - # "Union[dtype[Any], ExtensionDtype]" has no attribute "na_value" [union-attr] + # error: Item "dtype[Any]" of "Union[dtype[Any], ExtensionDtype]" has no + # attribute "na_value" return dtype.na_value # type: ignore[union-attr] elif dtype.kind in ["m", "M"]: return dtype.type("NaT") diff --git a/pandas/core/internals/construction.py b/pandas/core/internals/construction.py index ef411779495d4..15df38a6f31bf 100644 --- a/pandas/core/internals/construction.py +++ b/pandas/core/internals/construction.py @@ -296,9 +296,8 @@ def init_dict(data: Dict, index, columns, dtype: Optional[DtypeObj] = None): ): # GH#1783 - # pandas\core\internals\construction.py:272: error: Value of - # type variable "_DTypeScalar" of "dtype" cannot be "object" - # [type-var] + # error: Value of type variable "_DTypeScalar" of "dtype" cannot be + # "object" nan_dtype = np.dtype(object) # type: ignore[type-var] else: # error: Incompatible types in assignment (expression has type @@ -335,8 +334,8 @@ def nested_data_to_arrays( columns = data[0]._fields arrays, columns = to_arrays(data, columns, dtype=dtype) - # pandas/core/internals/construction.py:301: error: Value of type variable - # "AnyArrayLike" of "ensure_index" cannot be "Optional[Index]" [type-var] + # error: Value of type variable "AnyArrayLike" of "ensure_index" cannot be + # "Optional[Index]" columns = ensure_index(columns) # type: ignore[type-var] if index is None: @@ -620,19 +619,12 @@ def to_arrays(data, columns, dtype: Optional[DtypeObj] = None): data = [tuple(x) for x in data] content, columns = _list_to_arrays(data, columns) - # pandas/core/internals/construction.py:584: error: Incompatible types in assignment - # (expression has type "List[ndarray]", variable has type "List[Union[Union[str, - # int, float, bool], Union[Any, Any, Any, Any]]]") [assignment] - - # pandas/core/internals/construction.py:584: note: "List" is invariant -- see - # http://mypy.readthedocs.io/en/latest/common_issues.html#variance - - # pandas/core/internals/construction.py:584: note: Consider using "Sequence" - # instead, which is covariant - - # pandas/core/internals/construction.py:584: error: Argument 1 to - # "_finalize_columns_and_data" has incompatible type "List[Union[Union[str, int, - # float, bool], Union[Any, Any, Any, Any]]]"; expected "ndarray" [arg-type] + # error: Incompatible types in assignment (expression has type "List[ndarray]", + # variable has type "List[Union[Union[str, int, float, bool], Union[Any, Any, Any, + # Any]]]") + # error: Argument 1 to "_finalize_columns_and_data" has incompatible type + # "List[Union[Union[str, int, float, bool], Union[Any, Any, Any, Any]]]"; expected + # "ndarray" content, columns = _finalize_columns_and_data( # type: ignore[assignment] content, columns, dtype # type: ignore[arg-type] ) @@ -679,10 +671,9 @@ def _list_of_series_to_arrays( content = np.vstack(aligned_values) - # pandas/core/internals/construction.py:628: error: Incompatible return value type - # (got "Tuple[ndarray, Union[Index, List[Any]]]", expected - # "Tuple[List[Union[Union[str, int, float, bool], Union[Any, Any, Any, Any]]], - # Union[Index, List[Union[str, int]]]]") [return-value] + # error: Incompatible return value type (got "Tuple[ndarray, Union[Index, + # List[Any]]]", expected "Tuple[List[Union[Union[str, int, float, bool], Union[Any, + # Any, Any, Any]]], Union[Index, List[Union[str, int]]]]") return content, columns # type: 
ignore[return-value] @@ -730,14 +721,13 @@ def _finalize_columns_and_data( """ Ensure we have valid columns, cast object dtypes if possible. """ - # pandas/core/internals/construction.py:675: error: Incompatible types in assignment - # (expression has type "List[Any]", variable has type "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type "List[Any]", variable + # has type "ndarray") content = list(content.T) # type: ignore[assignment] try: - # pandas/core/internals/construction.py:678: error: Argument 1 to - # "_validate_or_indexify_columns" has incompatible type "ndarray"; expected - # "List[Any]" [arg-type] + # error: Argument 1 to "_validate_or_indexify_columns" has incompatible type + # "ndarray"; expected "List[Any]" columns = _validate_or_indexify_columns( content, columns # type: ignore[arg-type] ) @@ -746,20 +736,18 @@ def _finalize_columns_and_data( raise ValueError(err) from err if len(content) and content[0].dtype == np.object_: - # pandas/core/internals/construction.py:684: error: Incompatible types in - # assignment (expression has type "List[Union[Union[str, int, float, bool], - # Union[Any, Any, Any, Any]]]", variable has type "ndarray") [assignment] - - # pandas/core/internals/construction.py:684: error: Argument 1 to - # "_convert_object_array" has incompatible type "ndarray"; expected - # "List[Union[Union[str, int, float, bool], Union[Any, Any, Any, Any]]]" - # [arg-type] + # error: Incompatible types in assignment (expression has type + # "List[Union[Union[str, int, float, bool], Union[Any, Any, Any, Any]]]", + # variable has type "ndarray") + # error: Argument 1 to "_convert_object_array" has incompatible type "ndarray"; + # expected "List[Union[Union[str, int, float, bool], Union[Any, Any, Any, + # Any]]]" content = _convert_object_array( # type: ignore[assignment] content, dtype=dtype # type: ignore[arg-type] ) - # pandas/core/internals/construction.py:685: error: Incompatible return value type - # (got "Tuple[ndarray, Union[Index, List[Union[str, int]]]]", expected - # "Tuple[List[ndarray], Union[Index, List[Union[str, int]]]]") [return-value] + # error: Incompatible return value type (got "Tuple[ndarray, Union[Index, + # List[Union[str, int]]]]", expected "Tuple[List[ndarray], Union[Index, + # List[Union[str, int]]]]") return content, columns # type: ignore[return-value] diff --git a/pandas/core/internals/managers.py b/pandas/core/internals/managers.py index b7874b81208f4..67b4d48f1dc28 100644 --- a/pandas/core/internals/managers.py +++ b/pandas/core/internals/managers.py @@ -803,21 +803,19 @@ def as_array( if blk.is_extension: # Avoid implicit conversion of extension blocks to object - # pandas\core\internals\managers.py:844: error: Item "ndarray" - # of "Union[ndarray, ExtensionArray]" has no attribute - # "to_numpy" [union-attr] + # error: Item "ndarray" of "Union[ndarray, ExtensionArray]" has no + # attribute "to_numpy" arr = blk.values.to_numpy( # type: ignore[union-attr] dtype=dtype, na_value=na_value ).reshape(blk.shape) else: arr = np.asarray(blk.get_values()) if dtype: - # pandas/core/internals/managers.py:869: error: Argument 1 to - # "astype" of "_ArrayOrScalarCommon" has incompatible type - # "Union[ExtensionDtype, str, dtype[Any], Type[object]]"; expected - # "Union[dtype[Any], None, type, _SupportsDType, str, - # Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], - # List[Any], _DTypeDict, Tuple[Any, Any]]]" [arg-type] + # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has + # incompatible type 
"Union[ExtensionDtype, str, dtype[Any], + # Type[object]]"; expected "Union[dtype[Any], None, type, + # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" arr = arr.astype(dtype, copy=False) # type: ignore[arg-type] else: arr = self._interleave(dtype=dtype, na_value=na_value) @@ -851,11 +849,11 @@ def _interleave( elif is_dtype_equal(dtype, str): dtype = "object" - # pandas/core/internals/managers.py:902: error: Argument "dtype" to "empty" has - # incompatible type "Union[ExtensionDtype, str, dtype[Any], Type[object], - # None]"; expected "Union[dtype[Any], None, type, _SupportsDType, str, - # Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], List[Any], - # _DTypeDict, Tuple[Any, Any]]]" [arg-type] + # error: Argument "dtype" to "empty" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int], + # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" result = np.empty(self.shape, dtype=dtype) # type: ignore[arg-type] itemmask = np.zeros(self.shape[0]) @@ -865,17 +863,15 @@ def _interleave( if blk.is_extension: # Avoid implicit conversion of extension blocks to object - # pandas\core\internals\managers.py:889: error: Item "ndarray" - # of "Union[ndarray, ExtensionArray]" has no attribute - # "to_numpy" [union-attr] + # error: Item "ndarray" of "Union[ndarray, ExtensionArray]" has no + # attribute "to_numpy" arr = blk.values.to_numpy( # type: ignore[union-attr] dtype=dtype, na_value=na_value ) else: - # pandas/core/internals/managers.py:925: error: Argument 1 to - # "get_values" of "Block" has incompatible type "Union[ExtensionDtype, - # str, dtype[Any], Type[object], None]"; expected "Union[dtype[Any], - # ExtensionDtype, None]" [arg-type] + # error: Argument 1 to "get_values" of "Block" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" arr = blk.get_values(dtype) # type: ignore[arg-type] result[rl.indexer] = arr itemmask[rl.indexer] = 1 @@ -1409,11 +1405,10 @@ def _make_na_block(self, placement, fill_value=None): block_shape[0] = len(placement) dtype, fill_value = infer_dtype_from_scalar(fill_value) - # pandas\core\internals\managers.py:1428: error: Argument "dtype" to - # "empty" has incompatible type "Union[dtype, ExtensionDtype]"; - # expected "Union[dtype, None, type, _SupportsDtype, str, Tuple[Any, - # int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, - # Tuple[Any, Any]]" [arg-type] + # error: Argument "dtype" to "empty" has incompatible type "Union[dtype, + # ExtensionDtype]"; expected "Union[dtype, None, type, _SupportsDtype, str, + # Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], List[Any], _DtypeDict, + # Tuple[Any, Any]]" block_values = np.empty(block_shape, dtype=dtype) # type: ignore[arg-type] block_values.fill(fill_value) return make_block(block_values, placement=placement, ndim=block_values.ndim) @@ -1456,9 +1451,8 @@ def _equal_values(self: T, other: T) -> bool: return False left = self.blocks[0].values right = other.blocks[0].values - # pandas\core\internals\managers.py:1472: error: Value of type - # variable "ArrayLike" of "array_equals" cannot be "Union[ndarray, - # ExtensionArray]" [type-var] + # error: Value of type variable "ArrayLike" of "array_equals" cannot be + # "Union[ndarray, ExtensionArray]" return 
array_equals(left, right) # type: ignore[type-var] return blockwise_all(self, other, array_equals) @@ -1806,10 +1800,9 @@ def _multi_blockify(tuples, dtype: Optional[Dtype] = None): new_blocks = [] for dtype, tup_block in grouper: - # pandas/core/internals/managers.py:1810: error: Argument 2 to "_stack_arrays" - # has incompatible type "Union[ExtensionDtype, str, dtype[Any], Type[str], - # Type[float], Type[int], Type[complex], Type[bool], Type[object], None]"; - # expected "dtype[Any]" [arg-type] + # error: Argument 2 to "_stack_arrays" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int], + # Type[complex], Type[bool], Type[object], None]"; expected "dtype[Any]" values, placement = _stack_arrays( list(tup_block), dtype # type: ignore[arg-type] ) @@ -1893,10 +1886,10 @@ def _merge_blocks( # TODO: optimization potential in case all mgrs contain slices and # combination of those slices is a slice, too. new_mgr_locs = np.concatenate([b.mgr_locs.as_array for b in blocks]) - # pandas\core\internals\managers.py:1931: error: List comprehension has - # incompatible type List[Union[ndarray, ExtensionArray]]; expected - # List[Union[complex, generic, Sequence[Union[int, float, complex, str, - # bytes, generic]], Sequence[Sequence[Any]], _SupportsArray]] [misc] + # error: List comprehension has incompatible type List[Union[ndarray, + # ExtensionArray]]; expected List[Union[complex, generic, Sequence[Union[int, + # float, complex, str, bytes, generic]], Sequence[Sequence[Any]], + # _SupportsArray]] new_values = np.vstack([b.values for b in blocks]) # type: ignore[misc] argsort = np.argsort(new_mgr_locs) diff --git a/pandas/core/internals/ops.py b/pandas/core/internals/ops.py index 8e736b95bac17..c05d770e55cc9 100644 --- a/pandas/core/internals/ops.py +++ b/pandas/core/internals/ops.py @@ -105,9 +105,8 @@ def _get_same_shape_values( # TODO(EA2D): with 2D EAs only this first clause would be needed if not (left_ea or right_ea): - # pandas\core\internals\ops.py:106: error: Invalid index type - # "Tuple[Any, slice]" for "Union[ndarray, ExtensionArray]"; expected - # type "Union[int, slice, ndarray]" [index] + # error: Invalid index type "Tuple[Any, slice]" for "Union[ndarray, + # ExtensionArray]"; expected type "Union[int, slice, ndarray]" lvals = lvals[rblk.mgr_locs.indexer, :] # type: ignore[index] assert lvals.shape == rvals.shape, (lvals.shape, rvals.shape) elif left_ea and right_ea: @@ -115,31 +114,25 @@ def _get_same_shape_values( elif right_ea: # lvals are 2D, rvals are 1D - # pandas\core\internals\ops.py:112: error: Invalid index type - # "Tuple[Any, slice]" for "Union[ndarray, ExtensionArray]"; expected - # type "Union[int, slice, ndarray]" [index] + # error: Invalid index type "Tuple[Any, slice]" for "Union[ndarray, + # ExtensionArray]"; expected type "Union[int, slice, ndarray]" lvals = lvals[rblk.mgr_locs.indexer, :] # type: ignore[index] assert lvals.shape[0] == 1, lvals.shape - # pandas\core\internals\ops.py:114: error: Invalid index type - # "Tuple[int, slice]" for "Union[Any, ExtensionArray]"; expected type - # "Union[int, slice, ndarray]" [index] + # error: Invalid index type "Tuple[int, slice]" for "Union[Any, + # ExtensionArray]"; expected type "Union[int, slice, ndarray]" lvals = lvals[0, :] # type: ignore[index] else: # lvals are 1D, rvals are 2D assert rvals.shape[0] == 1, rvals.shape - # pandas\core\internals\ops.py:118: error: Invalid index type - # "Tuple[int, slice]" for "Union[ndarray, ExtensionArray]"; expected - # type "Union[int, 
slice, ndarray]" [index] + # error: Invalid index type "Tuple[int, slice]" for "Union[ndarray, + # ExtensionArray]"; expected type "Union[int, slice, ndarray]" rvals = rvals[0, :] # type: ignore[index] - # pandas\core\internals\ops.py:120: error: Incompatible return value type - # (got "Tuple[Union[ndarray, ExtensionArray], Union[ndarray, - # ExtensionArray]]", expected "Tuple[ExtensionArray, ExtensionArray]") - # [return-value] - - # pandas\core\internals\ops.py:120: error: Incompatible return value type - # (got "Tuple[Union[ndarray, ExtensionArray], Union[ndarray, - # ExtensionArray]]", expected "Tuple[ndarray, ndarray]") [return-value] + # error: Incompatible return value type (got "Tuple[Union[ndarray, ExtensionArray], + # Union[ndarray, ExtensionArray]]", expected "Tuple[ExtensionArray, + # ExtensionArray]") + # error: Incompatible return value type (got "Tuple[Union[ndarray, ExtensionArray], + # Union[ndarray, ExtensionArray]]", expected "Tuple[ndarray, ndarray]") return lvals, rvals # type: ignore[return-value] diff --git a/pandas/core/reshape/merge.py b/pandas/core/reshape/merge.py index 887a1ba638749..0d5b9918b996a 100644 --- a/pandas/core/reshape/merge.py +++ b/pandas/core/reshape/merge.py @@ -2049,31 +2049,25 @@ def _factorize_keys( assert isinstance(rk, Categorical) # Cast rk to encoding so we can compare codes with lk - # pandas/core/reshape/merge.py:2035: error: has no attribute - # "_encode_with_my_categories" [attr-defined] + # error: has no attribute "_encode_with_my_categories" rk = lk._encode_with_my_categories(rk) # type: ignore[attr-defined] - # pandas/core/reshape/merge.py:2037: error: has no attribute "codes" - # [attr-defined] + # error: has no attribute "codes" lk = ensure_int64(lk.codes) # type: ignore[attr-defined] - # pandas/core/reshape/merge.py:2038: error: "ndarray" has no attribute "codes" - # [attr-defined] + # error: "ndarray" has no attribute "codes" rk = ensure_int64(rk.codes) # type: ignore[attr-defined] elif is_extension_array_dtype(lk.dtype) and is_dtype_equal(lk.dtype, rk.dtype): - # pandas\core\reshape\merge.py:1967: error: Incompatible types in - # assignment (expression has type "ndarray", variable has type - # "ExtensionArray") [assignment] - - # pandas/core/reshape/merge.py:2047: error: Item "ndarray" of "Union[Any, - # ndarray]" has no attribute "_values_for_factorize" [union-attr] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") + # error: Item "ndarray" of "Union[Any, ndarray]" has no attribute + # "_values_for_factorize" lk, _ = lk._values_for_factorize() # type: ignore[union-attr,assignment] # error: Incompatible types in assignment (expression has type # "ndarray", variable has type "ExtensionArray") - - # pandas/core/reshape/merge.py:2053: error: Item "ndarray" of "Union[Any, - # ndarray]" has no attribute "_values_for_factorize" [union-attr] + # error: Item "ndarray" of "Union[Any, ndarray]" has no attribute + # "_values_for_factorize" rk, _ = rk._values_for_factorize() # type: ignore[union-attr,assignment] if is_integer_dtype(lk.dtype) and is_integer_dtype(rk.dtype): diff --git a/pandas/core/reshape/pivot.py b/pandas/core/reshape/pivot.py index c92dd1ff9bef4..7cce0e09a1ad8 100644 --- a/pandas/core/reshape/pivot.py +++ b/pandas/core/reshape/pivot.py @@ -441,11 +441,8 @@ def pivot( cols = [] append = index is None - # pandas\core\reshape\pivot.py:455: error: Unsupported operand types - # for + ("List[Any]" and "ExtensionArray") [operator] - - # 
pandas\core\reshape\pivot.py:455: error: Unsupported left operand - # type for + ("ExtensionArray") [operator] + # error: Unsupported operand types for + ("List[Any]" and "ExtensionArray") + # error: Unsupported left operand type for + ("ExtensionArray") indexed = data.set_index( cols + columns, append=append # type: ignore[operator] ) diff --git a/pandas/core/reshape/reshape.py b/pandas/core/reshape/reshape.py index c491196bf2a7a..1f92d3a4e8863 100644 --- a/pandas/core/reshape/reshape.py +++ b/pandas/core/reshape/reshape.py @@ -933,9 +933,8 @@ def _get_dummies_1d( if dtype is None: dtype = np.uint8 - # pandas/core/reshape/reshape.py:936: error: Argument 1 to "dtype" has incompatible - # type "Union[ExtensionDtype, str, dtype[Any], Type[object]]"; expected "Type[Any]" - # [arg-type] + # error: Argument 1 to "dtype" has incompatible type "Union[ExtensionDtype, str, + # dtype[Any], Type[object]]"; expected "Type[Any]" dtype = np.dtype(dtype) # type: ignore[arg-type] if is_object_dtype(dtype): diff --git a/pandas/core/strings/accessor.py b/pandas/core/strings/accessor.py index 06d21768b1cd8..29f8146429ac2 100644 --- a/pandas/core/strings/accessor.py +++ b/pandas/core/strings/accessor.py @@ -602,8 +602,7 @@ def cat(self, others=None, sep=None, na_rep=None, join="left"): result = Series( # type: ignore[assignment] result, dtype=dtype, index=data.index, name=self._orig.name ) - # pandas\core\strings\accessor.py:616: error: "ndarray" has no - # attribute "__finalize__" [attr-defined] + # error: "ndarray" has no attribute "__finalize__" result = result.__finalize__( # type: ignore[attr-defined] self._orig, method="str_cat" ) diff --git a/pandas/core/strings/object_array.py b/pandas/core/strings/object_array.py index 1ea62b2065ec7..085e48d753370 100644 --- a/pandas/core/strings/object_array.py +++ b/pandas/core/strings/object_array.py @@ -56,9 +56,8 @@ def _str_map(self, f, na_value=None, dtype: Optional[Dtype] = None): return np.ndarray(0, dtype=dtype) # type: ignore[arg-type] if not isinstance(arr, np.ndarray): - # pandas\core\strings\object_array.py:59: error: Incompatible types - # in assignment (expression has type "ndarray", variable has type - # "ObjectStringArrayMixin") [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ObjectStringArrayMixin") arr = np.asarray(arr, dtype=object) # type: ignore[assignment] mask = isna(arr) convert = not np.all(mask) diff --git a/pandas/core/tools/datetimes.py b/pandas/core/tools/datetimes.py index 0c0f12f5fa5b6..f2a93acea7197 100644 --- a/pandas/core/tools/datetimes.py +++ b/pandas/core/tools/datetimes.py @@ -392,9 +392,8 @@ def _convert_listlike_datetimes( arg, _ = maybe_convert_dtype(arg, copy=False) except TypeError: if errors == "coerce": - # pandas\core\tools\datetimes.py:392: error: Incompatible types in - # assignment (expression has type "ndarray", variable has type - # "ExtensionArray") [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") result = np.array( # type: ignore[assignment] ["NaT"], dtype="datetime64[ns]" ).repeat(len(arg)) diff --git a/pandas/core/tools/numeric.py b/pandas/core/tools/numeric.py index aa52d5997341b..a8f1ed5187dca 100644 --- a/pandas/core/tools/numeric.py +++ b/pandas/core/tools/numeric.py @@ -165,8 +165,8 @@ def to_numeric(arg, errors="raise", downcast=None): mask = values._mask values = values._data[~mask] else: - # pandas/core/tools/numeric.py:168: error: Incompatible types in 
assignment - # (expression has type "None", variable has type "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type "None", variable + # has type "ndarray") mask = None # type: ignore[assignment] values_dtype = getattr(values, "dtype", None) diff --git a/pandas/core/window/ewm.py b/pandas/core/window/ewm.py index e04fd939662f7..7fab91b6f9fbe 100644 --- a/pandas/core/window/ewm.py +++ b/pandas/core/window/ewm.py @@ -243,11 +243,10 @@ def __init__( ) if isna(times).any(): raise ValueError("Cannot convert NaT values to integer") - # pandas/core/window/ewm.py:261: error: Item "str" of "Union[str, ndarray, - # FrameOrSeries, None]" has no attribute "view" [union-attr] - - # pandas/core/window/ewm.py:261: error: Item "None" of "Union[str, ndarray, - # FrameOrSeries, None]" has no attribute "view" [union-attr] + # error: Item "str" of "Union[str, ndarray, FrameOrSeries, None]" has no + # attribute "view" + # error: Item "None" of "Union[str, ndarray, FrameOrSeries, None]" has no + # attribute "view" self.times = np.asarray(times.view(np.int64)) # type: ignore[union-attr] self.halflife = Timedelta(halflife).value # Halflife is no longer applicable when calculating COM @@ -262,9 +261,8 @@ def __init__( "halflife can only be a timedelta convertible argument if " "times is not None." ) - # pandas\core\window\ewm.py:252: error: Incompatible types in - # assignment (expression has type "None", variable has type - # "ndarray") [assignment] + # error: Incompatible types in assignment (expression has type "None", + # variable has type "ndarray") self.times = None # type: ignore[assignment] self.halflife = None # error: Argument 3 to "get_center_of_mass" has incompatible type diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index 264ec65492ce8..9559665ed5210 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -294,12 +294,10 @@ def _prep_values(self, values: Optional[np.ndarray] = None) -> np.ndarray: self._selected_obj, extract_numpy=True ) - # pandas/core/window/rolling.py:300: error: Item "None" of "Optional[ndarray]" - # has no attribute "dtype" [union-attr] + # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" if needs_i8_conversion(values.dtype): # type: ignore[union-attr] raise NotImplementedError( - # pandas/core/window/rolling.py:302: error: Item "None" of - # "Optional[ndarray]" has no attribute "dtype" [union-attr] + # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" f"ops for {type(self).__name__} for this " # type: ignore[union-attr] f"dtype {values.dtype} are not implemented" ) diff --git a/pandas/io/formats/format.py b/pandas/io/formats/format.py index 944eb97d1fe0b..c7a9fef29b1ea 100644 --- a/pandas/io/formats/format.py +++ b/pandas/io/formats/format.py @@ -1536,9 +1536,8 @@ def _format_strings(self) -> List[str]: if is_categorical_dtype(values.dtype): # Categorical is special for now, so that we can preserve tzinfo - # pandas/io/formats/format.py:1546: error: Item "ExtensionArray" of - # "Union[Any, ExtensionArray]" has no attribute "_internal_get_values" - # [union-attr] + # error: Item "ExtensionArray" of "Union[Any, ExtensionArray]" has no + # attribute "_internal_get_values" array = values._internal_get_values() # type: ignore[union-attr] else: array = np.asarray(values) @@ -1607,33 +1606,23 @@ def format_percentiles( percentiles = 100 * percentiles - # pandas/io/formats/format.py:1612: error: Item "List[Union[int, float]]" of - # "Union[ndarray, List[Union[int, 
float]], List[float], List[Union[str, float]]]" - # has no attribute "astype" [union-attr] - - # pandas/io/formats/format.py:1612: error: Item "List[float]" of "Union[ndarray, - # List[Union[int, float]], List[float], List[Union[str, float]]]" has no attribute - # "astype" [union-attr] - - # pandas/io/formats/format.py:1612: error: Item "List[Union[str, float]]" of - # "Union[ndarray, List[Union[int, float]], List[float], List[Union[str, float]]]" - # has no attribute "astype" [union-attr] + # error: Item "List[Union[int, float]]" of "Union[ndarray, List[Union[int, float]], + # List[float], List[Union[str, float]]]" has no attribute "astype" + # error: Item "List[float]" of "Union[ndarray, List[Union[int, float]], List[float], + # List[Union[str, float]]]" has no attribute "astype" + # error: Item "List[Union[str, float]]" of "Union[ndarray, List[Union[int, float]], + # List[float], List[Union[str, float]]]" has no attribute "astype" int_idx = np.isclose( percentiles.astype(int), percentiles # type: ignore[union-attr] ) if np.all(int_idx): - # pandas/io/formats/format.py:1615: error: Item "List[Union[int, float]]" of - # "Union[ndarray, List[Union[int, float]], List[float], List[Union[str, - # float]]]" has no attribute "astype" [union-attr] - - # pandas/io/formats/format.py:1615: error: Item "List[float]" of "Union[ndarray, - # List[Union[int, float]], List[float], List[Union[str, float]]]" has no - # attribute "astype" [union-attr] - - # pandas/io/formats/format.py:1615: error: Item "List[Union[str, float]]" of - # "Union[ndarray, List[Union[int, float]], List[float], List[Union[str, - # float]]]" has no attribute "astype" [union-attr] + # error: Item "List[Union[int, float]]" of "Union[ndarray, List[Union[int, + # float]], List[float], List[Union[str, float]]]" has no attribute "astype" + # error: Item "List[float]" of "Union[ndarray, List[Union[int, float]], + # List[float], List[Union[str, float]]]" has no attribute "astype" + # error: Item "List[Union[str, float]]" of "Union[ndarray, List[Union[int, + # float]], List[float], List[Union[str, float]]]" has no attribute "astype" out = percentiles.astype(int).astype(str) # type: ignore[union-attr] return [i + "%" for i in out] @@ -1647,21 +1636,16 @@ def format_percentiles( ).astype(int) prec = max(1, prec) out = np.empty_like(percentiles, dtype=object) - # pandas/io/formats/format.py:1635: error: No overload variant of "__getitem__" of - # "list" matches argument type "Union[bool_, ndarray]" [call-overload] + # error: No overload variant of "__getitem__" of "list" matches argument type + # "Union[bool_, ndarray]" out[int_idx] = ( percentiles[int_idx].astype(int).astype(str) # type: ignore[call-overload] ) - # pandas/io/formats/format.py:1636: error: Item "float" of "Union[Any, - # float, str]" has no attribute "round" [union-attr] - - # pandas/io/formats/format.py:1636: error: Item "str" of "Union[Any, float, - # str]" has no attribute "round" [union-attr] - - # pandas/io/formats/format.py:1636: error: Invalid index type "Union[bool_, - # Any]" for "Union[ndarray, List[Union[int, float]], List[float], - # List[Union[str, float]]]"; expected type "int" [index] + # error: Item "float" of "Union[Any, float, str]" has no attribute "round" + # error: Item "str" of "Union[Any, float, str]" has no attribute "round" + # error: Invalid index type "Union[bool_, Any]" for "Union[ndarray, List[Union[int, + # float]], List[float], List[Union[str, float]]]"; expected type "int" out[~int_idx] = ( percentiles[~int_idx].round(prec).astype(str) # type: 
ignore[union-attr,index] ) diff --git a/pandas/io/formats/string.py b/pandas/io/formats/string.py index 4bf5fee69f129..512954158792d 100644 --- a/pandas/io/formats/string.py +++ b/pandas/io/formats/string.py @@ -113,13 +113,10 @@ def _join_multiline(self, strcols_input: Iterable[List[str]]) -> str: if self.fmt.index: idx = strcols.pop(0) - # pandas\io\formats\string.py:116: error: Argument 1 to "__call__" - # of "_NumberOp" has incompatible type "None"; expected "Union[int, - # float, complex, number, bool_]" [arg-type] - - # pandas\io\formats\string.py:116: error: Incompatible types in - # assignment (expression has type "number", variable has type - # "Optional[int]") [assignment] + # error: Argument 1 to "__call__" of "_NumberOp" has incompatible type + # "None"; expected "Union[int, float, complex, number, bool_]" + # error: Incompatible types in assignment (expression has type "number", + # variable has type "Optional[int]") lwidth -= ( # type: ignore[assignment,arg-type] np.array([self.adj.len(x) for x in idx]).max() + adjoin_width ) @@ -130,8 +127,8 @@ def _join_multiline(self, strcols_input: Iterable[List[str]]) -> str: ] assert lwidth is not None - # pandas\io\formats\string.py:124: error: Argument 1 to "_binify" has - # incompatible type "List[object]"; expected "List[int]" [arg-type] + # error: Argument 1 to "_binify" has incompatible type "List[object]"; expected + # "List[int]" col_bins = _binify(col_widths, lwidth) # type: ignore[arg-type] nbins = len(col_bins) diff --git a/pandas/io/json/_json.py b/pandas/io/json/_json.py index 6824e3c5100e5..b4e6f175ca266 100644 --- a/pandas/io/json/_json.py +++ b/pandas/io/json/_json.py @@ -535,12 +535,11 @@ def read_json( raise ValueError("cannot pass both convert_axes and orient='table'") if dtype is None and orient != "table": - # pandas/io/json/_json.py:538: error: Incompatible types in assignment - # (expression has type "bool", variable has type "Union[ExtensionDtype, str, - # dtype[Any], Type[str], Type[float], Type[int], Type[complex], Type[bool], - # Type[object], Dict[Optional[Hashable], Union[ExtensionDtype, Union[str, - # dtype[Any]], Type[str], Type[float], Type[int], Type[complex], Type[bool], - # Type[object]]], None]") [assignment] + # error: Incompatible types in assignment (expression has type "bool", variable + # has type "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object], Dict[Optional[Hashable], + # Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object]]], None]") dtype = True # type: ignore[assignment] if convert_axes is None and orient != "table": convert_axes = True @@ -881,12 +880,11 @@ def _try_convert_data(self, name, data, use_dtypes=True, convert_dates=True): return data, False return data.fillna(np.nan), True - # pandas/io/json/_json.py:878: error: Non-overlapping identity check (left - # operand type: "Union[ExtensionDtype, str, dtype[Any], Type[object], + # error: Non-overlapping identity check (left operand type: + # "Union[ExtensionDtype, str, dtype[Any], Type[object], # Dict[Optional[Hashable], Union[ExtensionDtype, Union[str, dtype[Any]], # Type[str], Type[float], Type[int], Type[complex], Type[bool], # Type[object]]]]", right operand type: "Literal[True]") - # [comparison-overlap] elif self.dtype is True: # type: ignore[comparison-overlap] pass else: @@ -896,9 +894,9 @@ def _try_convert_data(self, name, data, use_dtypes=True, convert_dates=True): ) if dtype is not 
None: try: - # pandas/io/json/_json.py:887: error: Argument 1 to "dtype" has - # incompatible type "Union[ExtensionDtype, str, dtype[Any], - # Type[object]]"; expected "Type[Any]" [arg-type] + # error: Argument 1 to "dtype" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object]]"; + # expected "Type[Any]" dtype = np.dtype(dtype) # type: ignore[arg-type] return data.astype(dtype), True except (TypeError, ValueError): diff --git a/pandas/io/parsers/base_parser.py b/pandas/io/parsers/base_parser.py index 8634367f42e54..fc309d00fb1de 100644 --- a/pandas/io/parsers/base_parser.py +++ b/pandas/io/parsers/base_parser.py @@ -643,9 +643,8 @@ def _infer_types(self, values, na_values, try_num_bool=True): na_count = 0 if issubclass(values.dtype.type, (np.number, np.bool_)): mask = algorithms.isin(values, list(na_values)) - # pandas/io/parsers/base_parser.py:638: error: Incompatible types in - # assignment (expression has type "number[Any]", variable has type "int") - # [assignment] + # error: Incompatible types in assignment (expression has type + # "number[Any]", variable has type "int") na_count = mask.sum() # type: ignore[assignment] if na_count > 0: if is_integer_dtype(values): @@ -705,9 +704,8 @@ def _cast_types(self, values, cast_type, column): # c-parser which parses all categories # as strings - # pandas/io/parsers/base_parser.py:696: error: Argument 2 to - # "astype_nansafe" has incompatible type "Type[str]"; expected - # "Union[dtype[Any], ExtensionDtype]" [arg-type] + # error: Argument 2 to "astype_nansafe" has incompatible type + # "Type[str]"; expected "Union[dtype[Any], ExtensionDtype]" values = astype_nansafe(values, str) # type: ignore[arg-type] cats = Index(values).unique().dropna() @@ -901,19 +899,17 @@ def _get_empty_meta( if not is_dict_like(dtype): # if dtype == None, default will be object. 
default_dtype = dtype or object - # pandas/io/parsers/base_parser.py:892: error: Argument 1 to "defaultdict" - # has incompatible type "Callable[[], Union[ExtensionDtype, str, dtype[Any], - # Type[object], Dict[Hashable, Union[ExtensionDtype, Union[str, dtype[Any]], - # Type[str], Type[float], Type[int], Type[complex], Type[bool], - # Type[object]]]]]"; expected "Optional[Callable[[], Union[ExtensionDtype, - # str, dtype[Any], Type[object]]]]" [arg-type] - - # pandas/io/parsers/base_parser.py:892: error: Incompatible return value - # type (got "Union[ExtensionDtype, str, dtype[Any], Type[object], - # Dict[Hashable, Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], - # Type[float], Type[int], Type[complex], Type[bool], Type[object]]]]", - # expected "Union[ExtensionDtype, str, dtype[Any], Type[object]]") - # [return-value] + # error: Argument 1 to "defaultdict" has incompatible type "Callable[[], + # Union[ExtensionDtype, str, dtype[Any], Type[object], Dict[Hashable, + # Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], + # Type[int], Type[complex], Type[bool], Type[object]]]]]"; expected + # "Optional[Callable[[], Union[ExtensionDtype, str, dtype[Any], + # Type[object]]]]" + # error: Incompatible return value type (got "Union[ExtensionDtype, str, + # dtype[Any], Type[object], Dict[Hashable, Union[ExtensionDtype, Union[str, + # dtype[Any]], Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]]]", expected "Union[ExtensionDtype, str, dtype[Any], + # Type[object]]") dtype = defaultdict( lambda: default_dtype # type: ignore[arg-type, return-value] ) diff --git a/pandas/io/pytables.py b/pandas/io/pytables.py index 2c1725848209b..6f26a49baacbc 100644 --- a/pandas/io/pytables.py +++ b/pandas/io/pytables.py @@ -3067,15 +3067,15 @@ def write_array(self, key: str, obj: FrameOrSeries, items: Optional[Index] = Non # store as UTC # with a zone - # pandas/io/pytables.py:3045: error: Item "ExtensionArray" of "Union[Any, - # ExtensionArray]" has no attribute "asi8" [union-attr] + # error: Item "ExtensionArray" of "Union[Any, ExtensionArray]" has no + # attribute "asi8" self._handle.create_array( self.group, key, value.asi8 # type: ignore[union-attr] ) node = getattr(self.group, key) - # pandas/io/pytables.py:3048: error: Item "ExtensionArray" of "Union[Any, - # ExtensionArray]" has no attribute "tz" [union-attr] + # error: Item "ExtensionArray" of "Union[Any, ExtensionArray]" has no + # attribute "tz" node._v_attrs.tz = _get_tz(value.tz) # type: ignore[union-attr] node._v_attrs.value_type = "datetime64" elif is_timedelta64_dtype(value.dtype): @@ -5148,9 +5148,8 @@ def _get_data_and_dtype_name(data: ArrayLike): dtype_name = data.dtype.name.split("[")[0] if data.dtype.kind in ["m", "M"]: - # pandas\io\pytables.py:5117: error: Incompatible types in assignment - # (expression has type "ndarray", variable has type "ExtensionArray") - # [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") data = np.asarray(data.view("i8")) # type: ignore[assignment] # TODO: we used to reshape for the dt64tz case, but no longer # doing that doesn't seem to break anything. why? 
@@ -5158,9 +5157,8 @@ def _get_data_and_dtype_name(data: ArrayLike): elif isinstance(data, PeriodIndex): data = data.asi8 - # pandas\io\pytables.py:5124: error: Incompatible types in assignment - # (expression has type "ndarray", variable has type "ExtensionArray") - # [assignment] + # error: Incompatible types in assignment (expression has type "ndarray", variable + # has type "ExtensionArray") data = np.asarray(data) # type: ignore[assignment] return data, dtype_name diff --git a/pandas/io/sql.py b/pandas/io/sql.py index 48a19f88e1ba0..2902ef3d06cdf 100644 --- a/pandas/io/sql.py +++ b/pandas/io/sql.py @@ -1515,12 +1515,12 @@ def to_sql( """ if dtype: if not is_dict_like(dtype): - # pandas/io/sql.py:1502: error: Value expression in dictionary - # comprehension has incompatible type "Union[ExtensionDtype, str, - # dtype[Any], Type[object], Dict[Optional[Hashable], - # Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], - # Type[int], Type[complex], Type[bool], Type[object]]]]"; expected type - # "Union[ExtensionDtype, str, dtype[Any], Type[object]]" [misc] + # error: Value expression in dictionary comprehension has incompatible + # type "Union[ExtensionDtype, str, dtype[Any], Type[object], + # Dict[Optional[Hashable], Union[ExtensionDtype, Union[str, dtype[Any]], + # Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]]]"; expected type "Union[ExtensionDtype, str, + # dtype[Any], Type[object]]" dtype = {col_name: dtype for col_name in frame} # type: ignore[misc] else: dtype = cast(dict, dtype) @@ -1995,12 +1995,12 @@ def to_sql( """ if dtype: if not is_dict_like(dtype): - # pandas/io/sql.py:1970: error: Value expression in dictionary - # comprehension has incompatible type "Union[ExtensionDtype, str, - # dtype[Any], Type[object], Dict[Optional[Hashable], - # Union[ExtensionDtype, Union[str, dtype[Any]], Type[str], Type[float], - # Type[int], Type[complex], Type[bool], Type[object]]]]"; expected type - # "Union[ExtensionDtype, str, dtype[Any], Type[object]]" [misc] + # error: Value expression in dictionary comprehension has incompatible + # type "Union[ExtensionDtype, str, dtype[Any], Type[object], + # Dict[Optional[Hashable], Union[ExtensionDtype, Union[str, dtype[Any]], + # Type[str], Type[float], Type[int], Type[complex], Type[bool], + # Type[object]]]]"; expected type "Union[ExtensionDtype, str, + # dtype[Any], Type[object]]" dtype = {col_name: dtype for col_name in frame} # type: ignore[misc] else: dtype = cast(dict, dtype) diff --git a/pandas/io/stata.py b/pandas/io/stata.py index 5b90d0e72a142..1635aa8a71a36 100644 --- a/pandas/io/stata.py +++ b/pandas/io/stata.py @@ -1222,9 +1222,8 @@ def g(typ: int) -> Union[str, np.dtype]: if typ <= 2045: return str(typ) try: - # pandas\io\stata.py:1226: error: Incompatible return value - # type (got "Type[number]", expected "Union[str, dtype]") - # [return-value] + # error: Incompatible return value type (got "Type[number]", expected + # "Union[str, dtype]") return self.DTYPE_MAP_XML[typ] # type: ignore[return-value] except KeyError as err: raise ValueError(f"cannot convert stata dtype [{typ}]") from err @@ -1658,8 +1657,8 @@ def read( if self.dtyplist[i] is not None: col = data.columns[i] dtype = data[col].dtype - # pandas\io\stata.py:1677: error: Value of type variable - # "_DTypeScalar" of "dtype" cannot be "object" [type-var] + # error: Value of type variable "_DTypeScalar" of "dtype" cannot be + # "object" if ( dtype != np.dtype(object) # type: ignore[type-var] and dtype != self.dtyplist[i] diff 
--git a/pandas/tests/io/parser/common/test_chunksize.py b/pandas/tests/io/parser/common/test_chunksize.py index 1a0fce9941e96..b9169baf8ad84 100644 --- a/pandas/tests/io/parser/common/test_chunksize.py +++ b/pandas/tests/io/parser/common/test_chunksize.py @@ -140,8 +140,7 @@ def test_read_chunksize_jagged_names(all_parsers): parser = all_parsers data = "\n".join(["0"] * 7 + [",".join(["0"] * 10)]) - # pandas/tests/io/parser/common/test_chunksize.py:143: error: List item 0 has - # incompatible type "float"; expected "int" [list-item] + # error: List item 0 has incompatible type "float"; expected "int" expected = DataFrame( [[0] + [np.nan] * 9] * 7 + [[0] * 10] # type: ignore[list-item] ) From 21c143bbdbc9bbcfdc3d3257be06e81123489168 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Tue, 16 Feb 2021 17:26:46 +0000 Subject: [PATCH 61/86] update comments --- pandas/core/internals/concat.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pandas/core/internals/concat.py b/pandas/core/internals/concat.py index 0a203446b546d..6236e6895315b 100644 --- a/pandas/core/internals/concat.py +++ b/pandas/core/internals/concat.py @@ -316,7 +316,11 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike: if is_datetime64tz_dtype(empty_dtype): # TODO(EA2D): special case unneeded with 2D EAs i8values = np.full(self.shape[1], fill_value.value) - return DatetimeArray(i8values, dtype=empty_dtype) + # error: Incompatible return value type (got "DatetimeArray", + # expected "ndarray") + return DatetimeArray( # type: ignore[return-value] + i8values, dtype=empty_dtype + ) elif is_extension_array_dtype(blk_dtype): pass elif is_extension_array_dtype(empty_dtype): From 4068a38c858bc4be81efb4a66e6ec7218594c847 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Tue, 16 Feb 2021 18:11:22 +0000 Subject: [PATCH 62/86] IsInLongSeriesValuesDominate benchmark --- asv_bench/benchmarks/series_methods.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/asv_bench/benchmarks/series_methods.py b/asv_bench/benchmarks/series_methods.py index a6bffb1585f2a..fa67183122b59 100644 --- a/asv_bench/benchmarks/series_methods.py +++ b/asv_bench/benchmarks/series_methods.py @@ -151,6 +151,7 @@ class IsInLongSeriesLookUpDominates: def setup(self, dtype, MaxNumber, series_type): N = 10 ** 7 + # https://github.com/pandas-dev/pandas/issues/39844 if not np_version_under1p20 and dtype in ("Int64", "Float64"): raise NotImplementedError @@ -180,6 +181,11 @@ class IsInLongSeriesValuesDominate: def setup(self, dtype, series_type): N = 10 ** 7 + + # https://github.com/pandas-dev/pandas/issues/39844 + if not np_version_under1p20 and dtype in ("Int64", "Float64"): + raise NotImplementedError + if series_type == "random": np.random.seed(42) vals = np.random.randint(0, 10 * N, N) From 50bf741136c0f8f9df62c28d2aa4321696b9b775 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Tue, 16 Feb 2021 18:30:58 +0000 Subject: [PATCH 63/86] ignore new Styler errors --- pandas/io/formats/style.py | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/pandas/io/formats/style.py b/pandas/io/formats/style.py index 96043355e24d1..88a90b3df0504 100644 --- a/pandas/io/formats/style.py +++ b/pandas/io/formats/style.py @@ -1646,7 +1646,12 @@ def f(data: DataFrame, props: str) -> np.ndarray: return np.where(pd.isna(data).values, props, "") return self.apply( - f, axis=None, subset=subset, props=f"background-color: {null_color};" + # error: Argument 1 to "apply" of "Styler" has 
incompatible type + # "Callable[[DataFrame, str], ndarray]"; expected "Callable[..., Styler]" + f, # type: ignore[arg-type] + axis=None, + subset=subset, + props=f"background-color: {null_color};", ) def highlight_max( @@ -1677,7 +1682,13 @@ def f(data: FrameOrSeries, props: str) -> np.ndarray: return np.where(data == np.nanmax(data.values), props, "") return self.apply( - f, axis=axis, subset=subset, props=f"background-color: {color};" + # error: Argument 1 to "apply" of "Styler" has incompatible type + # "Callable[[FrameOrSeries, str], ndarray]"; expected "Callable[..., + # Styler]" + f, # type: ignore[arg-type] + axis=axis, + subset=subset, + props=f"background-color: {color};", ) def highlight_min( @@ -1708,7 +1719,13 @@ def f(data: FrameOrSeries, props: str) -> np.ndarray: return np.where(data == np.nanmin(data.values), props, "") return self.apply( - f, axis=axis, subset=subset, props=f"background-color: {color};" + # error: Argument 1 to "apply" of "Styler" has incompatible type + # "Callable[[FrameOrSeries, str], ndarray]"; expected "Callable[..., + # Styler]" + f, # type: ignore[arg-type] + axis=axis, + subset=subset, + props=f"background-color: {color};", ) @classmethod From a654494536ff89dda9bd15e56c456e3e3a0918ef Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 18 Feb 2021 11:10:39 +0000 Subject: [PATCH 64/86] update comments --- pandas/core/dtypes/cast.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index d25b4cabb54c0..e5cd0b7393ea8 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -630,7 +630,8 @@ def maybe_promote(dtype: np.dtype, fill_value=np.nan): inferred, fv = infer_dtype_from_scalar(fill_value, pandas_dtype=True) if inferred == dtype: return dtype, fv - return np.dtype(object), fill_value + # error: Value of type variable "_DTypeScalar" of "dtype" cannot be "object" + return np.dtype(object), fill_value # type: ignore[type-var] elif is_float(fill_value): if issubclass(dtype.type, np.bool_): From edb5e46dc5dcb7fa015f0010756a610b913863ad Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Fri, 19 Feb 2021 11:52:46 +0000 Subject: [PATCH 65/86] update comments --- pandas/core/internals/blocks.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index b131ad9b4b731..765c9a3cc888d 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -2520,5 +2520,7 @@ def ensure_block_shape(values: ArrayLike, ndim: int = 1) -> ArrayLike: # block.shape is incorrect for "2D" ExtensionArrays # We can't, and don't need to, reshape. 
- values = np.asarray(values).reshape(1, -1) + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") + values = np.asarray(values).reshape(1, -1) # type: ignore[assignment] return values From 32d877ff9393b379c7af545f7f38698f5eac40c1 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sun, 21 Feb 2021 19:50:40 +0000 Subject: [PATCH 66/86] update ignores --- pandas/core/algorithms.py | 20 +++++++-- pandas/core/arrays/_mixins.py | 4 +- pandas/core/arrays/datetimelike.py | 25 ++++++++--- pandas/core/arrays/interval.py | 5 ++- pandas/core/base.py | 4 +- pandas/core/generic.py | 11 ++++- pandas/core/indexes/base.py | 68 +++++++++++++++++++++++------ pandas/core/indexes/datetimelike.py | 6 ++- pandas/core/series.py | 4 +- pandas/io/formats/format.py | 6 ++- 10 files changed, 123 insertions(+), 30 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index b2d2f654829f4..c9d06ab16909a 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -358,7 +358,9 @@ def _get_values_for_rank(values: ArrayLike): def get_data_algo(values: ArrayLike): values = _get_values_for_rank(values) - ndtype = _check_object_for_strings(values) + # error: Argument 1 to "_check_object_for_strings" has incompatible type + # "ExtensionArray"; expected "ndarray" + ndtype = _check_object_for_strings(values) # type: ignore[arg-type] htable = _hashtables.get(ndtype, _hashtables["object"]) return htable, values @@ -515,7 +517,13 @@ def isin(comps: AnyArrayLike, values: AnyArrayLike) -> np.ndarray: ) if not isinstance(values, (ABCIndex, ABCSeries, ABCExtensionArray, np.ndarray)): - values = _ensure_arraylike(list(values)) + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "Index") + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "Series") + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "ndarray") + values = _ensure_arraylike(list(values)) # type: ignore[assignment] elif isinstance(values, ABCMultiIndex): # Avoid raising in extract_array @@ -533,7 +541,13 @@ def isin(comps: AnyArrayLike, values: AnyArrayLike) -> np.ndarray: # ExtensionArray]", variable has type "Series") values = extract_array(values, extract_numpy=True) # type: ignore[assignment] - comps = _ensure_arraylike(comps) + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "Index") + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "Series") + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "ndarray") + comps = _ensure_arraylike(comps) # type: ignore[assignment] # error: Incompatible types in assignment (expression has type "Union[Any, # ExtensionArray]", variable has type "Index") # error: Incompatible types in assignment (expression has type "Union[Any, diff --git a/pandas/core/arrays/_mixins.py b/pandas/core/arrays/_mixins.py index da3b01e34d291..f393f7aab26af 100644 --- a/pandas/core/arrays/_mixins.py +++ b/pandas/core/arrays/_mixins.py @@ -105,7 +105,9 @@ def take( new_data = take( self._ndarray, - indices, + # error: Argument 2 to "take" has incompatible type "Sequence[int]"; + # expected "ndarray" + indices, # type: ignore[arg-type] allow_fill=allow_fill, fill_value=fill_value, axis=axis, diff --git a/pandas/core/arrays/datetimelike.py 
b/pandas/core/arrays/datetimelike.py index 0000827e246d4..eebfec7f3bcfa 100644 --- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -443,7 +443,11 @@ def view(self, dtype: Optional[Dtype] = None) -> ArrayLike: # dtypes here. Everything else we pass through to the underlying # ndarray. if dtype is None or dtype is self.dtype: - return type(self)(self._ndarray, dtype=self.dtype) + # error: Incompatible return value type (got "DatetimeLikeArrayMixin", + # expected "ndarray") + return type(self)( # type: ignore[return-value] + self._ndarray, dtype=self.dtype + ) if isinstance(dtype, type): # we sometimes pass non-dtype objects, e.g np.ndarray; @@ -453,15 +457,25 @@ def view(self, dtype: Optional[Dtype] = None) -> ArrayLike: dtype = pandas_dtype(dtype) if isinstance(dtype, (PeriodDtype, DatetimeTZDtype)): cls = dtype.construct_array_type() - return cls._simple_new(self.asi8, dtype=dtype) + # error: Incompatible return value type (got "Union[PeriodArray, + # DatetimeArray]", expected "ndarray") + return cls._simple_new(self.asi8, dtype=dtype) # type: ignore[return-value] elif dtype == "M8[ns]": from pandas.core.arrays import DatetimeArray - return DatetimeArray._simple_new(self.asi8, dtype=dtype) + # error: Incompatible return value type (got "DatetimeArray", expected + # "ndarray") + return DatetimeArray._simple_new( # type: ignore[return-value] + self.asi8, dtype=dtype + ) elif dtype == "m8[ns]": from pandas.core.arrays import TimedeltaArray - return TimedeltaArray._simple_new(self.asi8.view("m8[ns]"), dtype=dtype) + # error: Incompatible return value type (got "TimedeltaArray", expected + # "ndarray") + return TimedeltaArray._simple_new( # type: ignore[return-value] + self.asi8.view("m8[ns]"), dtype=dtype + ) return self._ndarray.view(dtype=dtype) # ------------------------------------------------------------------ @@ -1752,7 +1766,8 @@ def _with_freq(self, freq): freq = to_offset(self.inferred_freq) arr = self.view() - arr._freq = freq + # error: "ExtensionArray" has no attribute "_freq" + arr._freq = freq # type: ignore[attr-defined] return arr # -------------------------------------------------------------- diff --git a/pandas/core/arrays/interval.py b/pandas/core/arrays/interval.py index ffa445b806a43..acb3790f1ba41 100644 --- a/pandas/core/arrays/interval.py +++ b/pandas/core/arrays/interval.py @@ -1621,7 +1621,10 @@ def _maybe_convert_platform_interval(values) -> ArrayLike: # GH 19016 # empty lists/tuples get object dtype by default, but this is # prohibited for IntervalArray, so coerce to integer instead - return np.array([], dtype=np.int64) + + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") + return np.array([], dtype=np.int64) # type: ignore[return-value] elif is_categorical_dtype(values): values = np.asarray(values) diff --git a/pandas/core/base.py b/pandas/core/base.py index d6026c2666fdd..cf79779422380 100644 --- a/pandas/core/base.py +++ b/pandas/core/base.py @@ -1332,4 +1332,6 @@ def drop_duplicates(self, keep="first"): return self[~duplicated] # type: ignore[index] def duplicated(self, keep: Union[str, bool] = "first") -> np.ndarray: - return duplicated(self._values, keep=keep) + # error: Value of type variable "ArrayLike" of "duplicated" cannot be + # "Union[ExtensionArray, ndarray]" + return duplicated(self._values, keep=keep) # type: ignore[type-var] diff --git a/pandas/core/generic.py b/pandas/core/generic.py index fc9fad2dded88..c9cfc7bc5c092 100644 --- a/pandas/core/generic.py +++ 
b/pandas/core/generic.py @@ -8456,8 +8456,15 @@ def ranker(data): na_option=na_option, pct=pct, ) - ranks = self._constructor(ranks, **data._construct_axes_dict()) - return ranks.__finalize__(self, method="rank") + # error: Incompatible types in assignment (expression has type + # "FrameOrSeries", variable has type "ndarray") + # error: Argument 1 to "NDFrame" has incompatible type "ndarray"; expected + # "Union[ArrayManager, BlockManager]" + ranks = self._constructor( # type: ignore[assignment] + ranks, **data._construct_axes_dict() # type: ignore[arg-type] + ) + # error: "ndarray" has no attribute "__finalize__" + return ranks.__finalize__(self, method="rank") # type: ignore[attr-defined] # if numeric_only is None, and we can't get anything, we try with # numeric_only=True diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index 3b8250a213187..2da3b14028e8d 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -2956,15 +2956,27 @@ def _union(self, other, sort): if sort is None and self.is_monotonic and other.is_monotonic: try: - result = self._outer_indexer(lvals, rvals)[0] + # error: Argument 1 to "_outer_indexer" of "Index" has incompatible type + # "Union[ExtensionArray, ndarray]"; expected "ndarray" + result = self._outer_indexer(lvals, rvals)[0] # type: ignore[arg-type] except (TypeError, IncompatibleFrequency): # incomparable objects - result = list(lvals) + + # error: Incompatible types in assignment (expression has type + # "List[Any]", variable has type "ndarray") + result = list(lvals) # type: ignore[assignment] # worth making this faster? a very unusual case value_set = set(lvals) - result.extend([x for x in rvals if x not in value_set]) - result = Index(result)._values # do type inference here + # error: "ndarray" has no attribute "extend" + result.extend( # type: ignore[attr-defined] + [x for x in rvals if x not in value_set] + ) + # do type inference here + + # error: Incompatible types in assignment (expression has type + # "Union[ExtensionArray, ndarray]", variable has type "ndarray") + result = Index(result)._values # type: ignore[assignment] else: # find indexes of things in "other" that are not in "self" if self.is_unique: @@ -2978,7 +2990,9 @@ def _union(self, other, sort): result = concat_compat((lvals, other_diff)) else: - result = lvals + # error: Incompatible types in assignment (expression has type + # "Union[ExtensionArray, ndarray]", variable has type "ndarray") + result = lvals # type: ignore[assignment] result = _maybe_try_sort(result, sort) @@ -3070,7 +3084,9 @@ def _intersection(self, other, sort=False): if self.is_monotonic and other.is_monotonic: try: - result = self._inner_indexer(lvals, rvals)[0] + # error: Argument 1 to "_inner_indexer" of "Index" has incompatible type + # "Union[ExtensionArray, ndarray]"; expected "ndarray" + result = self._inner_indexer(lvals, rvals)[0] # type: ignore[arg-type] except TypeError: pass else: @@ -4269,16 +4285,26 @@ def _join_monotonic(self, other, how="left", return_indexers=False): elif how == "right": join_index = other lidx = self._left_indexer_unique(ov, sv) - ridx = None + # error: Incompatible types in assignment (expression has type "None", + # variable has type "ndarray") + ridx = None # type: ignore[assignment] elif how == "inner": - join_index, lidx, ridx = self._inner_indexer(sv, ov) + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "Index") + join_index, lidx, ridx = self._inner_indexer( # type:ignore[assignment] + 
sv, ov + ) # error: Argument 1 to "_wrap_joined_index" of "Index" has incompatible # type "Index"; expected "ndarray" join_index = self._wrap_joined_index( join_index, other # type: ignore[arg-type] ) elif how == "outer": - join_index, lidx, ridx = self._outer_indexer(sv, ov) + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "Index") + join_index, lidx, ridx = self._outer_indexer( # type:ignore[assignment] + sv, ov + ) # error: Argument 1 to "_wrap_joined_index" of "Index" has incompatible # type "Index"; expected "ndarray" join_index = self._wrap_joined_index( @@ -4286,13 +4312,29 @@ def _join_monotonic(self, other, how="left", return_indexers=False): ) else: if how == "left": - join_index, lidx, ridx = self._left_indexer(sv, ov) + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "Index") + join_index, lidx, ridx = self._left_indexer( # type: ignore[assignment] + sv, ov + ) elif how == "right": - join_index, ridx, lidx = self._left_indexer(ov, sv) + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "Index") + join_index, ridx, lidx = self._left_indexer( # type: ignore[assignment] + ov, sv + ) elif how == "inner": - join_index, lidx, ridx = self._inner_indexer(sv, ov) + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "Index") + join_index, lidx, ridx = self._inner_indexer( # type:ignore[assignment] + sv, ov + ) elif how == "outer": - join_index, lidx, ridx = self._outer_indexer(sv, ov) + # error: Incompatible types in assignment (expression has type + # "ndarray", variable has type "Index") + join_index, lidx, ridx = self._outer_indexer( # type:ignore[assignment] + sv, ov + ) # error: Argument 1 to "_wrap_joined_index" of "Index" has incompatible type # "Index"; expected "ndarray" join_index = self._wrap_joined_index( diff --git a/pandas/core/indexes/datetimelike.py b/pandas/core/indexes/datetimelike.py index 96fb9d43d7265..bebb8c0077fec 100644 --- a/pandas/core/indexes/datetimelike.py +++ b/pandas/core/indexes/datetimelike.py @@ -559,8 +559,10 @@ def shift(self: _T, periods: int = 1, freq=None) -> _T: PeriodIndex.shift : Shift values of PeriodIndex. """ arr = self._data.view() - arr._freq = self.freq - result = arr._time_shift(periods, freq=freq) + # error: "ExtensionArray" has no attribute "_freq" + arr._freq = self.freq # type: ignore[attr-defined] + # error: "ExtensionArray" has no attribute "_time_shift" + result = arr._time_shift(periods, freq=freq) # type: ignore[attr-defined] return type(self)(result, name=self.name) # -------------------------------------------------------------------- diff --git a/pandas/core/series.py b/pandas/core/series.py index e42f3e97c185b..872f3387055da 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -2006,7 +2006,9 @@ def drop_duplicates(self, keep="first", inplace=False) -> Optional[Series]: else: return result - def duplicated(self, keep="first") -> Series: + # error: Return type "Series" of "duplicated" incompatible with return type + # "ndarray" in supertype "IndexOpsMixin" + def duplicated(self, keep="first") -> Series: # type: ignore[override] """ Indicate duplicate Series values. 
diff --git a/pandas/io/formats/format.py b/pandas/io/formats/format.py index 447e096f0788b..2356a1bb7d21d 100644 --- a/pandas/io/formats/format.py +++ b/pandas/io/formats/format.py @@ -1801,7 +1801,11 @@ def get_format_timedelta64( one_day_nanos = 86400 * 10 ** 9 even_days = ( - np.logical_and(consider_values, values_int % one_day_nanos != 0).sum() == 0 + # error: Unsupported operand types for % ("ExtensionArray" and "int") + np.logical_and( + consider_values, values_int % one_day_nanos != 0 # type: ignore[operator] + ).sum() + == 0 ) if even_days: From 810a937c04b9319e0966d50e211a6fdd8edbc828 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sun, 21 Feb 2021 19:57:19 +0000 Subject: [PATCH 67/86] re-apply benchmark fix --- asv_bench/benchmarks/algos/isin.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/asv_bench/benchmarks/algos/isin.py b/asv_bench/benchmarks/algos/isin.py index 5d81d9d0d45a3..89d57de9aeade 100644 --- a/asv_bench/benchmarks/algos/isin.py +++ b/asv_bench/benchmarks/algos/isin.py @@ -273,6 +273,7 @@ class IsInLongSeriesLookUpDominates: def setup(self, dtype, MaxNumber, series_type): N = 10 ** 7 + # https://github.com/pandas-dev/pandas/issues/39844 if not np_version_under1p20 and dtype in ("Int64", "Float64"): raise NotImplementedError @@ -303,6 +304,11 @@ class IsInLongSeriesValuesDominate: def setup(self, dtype, series_type): N = 10 ** 7 + + # https://github.com/pandas-dev/pandas/issues/39844 + if not np_version_under1p20 and dtype in ("Int64", "Float64"): + raise NotImplementedError + if series_type == "random": np.random.seed(42) vals = np.random.randint(0, 10 * N, N) From 7f38d0be176e624e09d941022e275bcd57260218 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Tue, 23 Feb 2021 14:54:05 +0000 Subject: [PATCH 68/86] fix failing test --- pandas/core/array_algos/take.py | 13 ++++++++++--- pandas/core/dtypes/cast.py | 15 +++++++++++---- pandas/core/frame.py | 6 +++++- 3 files changed, 26 insertions(+), 8 deletions(-) diff --git a/pandas/core/array_algos/take.py b/pandas/core/array_algos/take.py index 906aea0a90982..e25cf484ebf19 100644 --- a/pandas/core/array_algos/take.py +++ b/pandas/core/array_algos/take.py @@ -127,7 +127,9 @@ def take_2d_multi( row_idx = ensure_int64(row_idx) col_idx = ensure_int64(col_idx) - indexer = row_idx, col_idx + # error: Incompatible types in assignment (expression has type "Tuple[Any, Any]", + # variable has type "ndarray") + indexer = row_idx, col_idx # type: ignore[assignment] mask_info = None # check for promotion based on types only (do this first because @@ -419,8 +421,13 @@ def _take_preprocess_indexer_and_fill_value( if dtype != arr.dtype and (out is None or out.dtype != dtype): # check if promotion is actually required based on indexer mask = indexer == -1 - needs_masking = mask.any() - mask_info = mask, needs_masking + # error: Item "bool" of "Union[Any, bool]" has no attribute "any" + # [union-attr] + needs_masking = mask.any() # type: ignore[union-attr] + # error: Incompatible types in assignment (expression has type + # "Tuple[Union[Any, bool], Any]", variable has type + # "Optional[Tuple[None, bool]]") + mask_info = mask, needs_masking # type: ignore[assignment] if needs_masking: if out is not None and out.dtype != dtype: raise TypeError("Incompatible type for fill_value") diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index 5b89ce76319e1..1ef322dc6e459 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -1345,7 +1345,9 @@ def soft_convert_objects( values, 
                convert_datetime=datetime, convert_timedelta=timedelta
         )
     except (OutOfBoundsDatetime, ValueError):
-        return values
+        # error: Incompatible return value type (got "ndarray", expected
+        # "ExtensionArray")
+        return values  # type: ignore[return-value]
 
     if numeric and is_object_dtype(values.dtype):
         converted = lib.maybe_convert_numeric(values, set(), coerce_numeric=True)
@@ -1354,7 +1356,8 @@ def soft_convert_objects(
         values = converted if not isna(converted).all() else values
         values = values.copy() if copy else values
 
-    return values
+    # error: Incompatible return value type (got "ndarray", expected "ExtensionArray")
+    return values  # type: ignore[return-value]
 
 
 def convert_dtypes(
@@ -1528,12 +1531,16 @@ def try_datetime(v: np.ndarray) -> ArrayLike:
                 pass
             else:
                 dti = DatetimeIndex(values).tz_localize("UTC").tz_convert(tz=tz)
-                return dti._data
+                # error: Incompatible return value type (got "DatetimeArray", expected
+                # "ndarray")  [return-value]
+                return dti._data  # type: ignore[return-value]
         except TypeError:
             # e.g. is not convertible to datetime
             pass
 
-        return v.reshape(shape)
+        # error: Incompatible return value type (got "ndarray", expected
+        # "ExtensionArray")
+        return v.reshape(shape)  # type: ignore[return-value]
 
     def try_timedelta(v: np.ndarray) -> np.ndarray:
         # safe coerce to timedelta64
diff --git a/pandas/core/frame.py b/pandas/core/frame.py
index dafbad2c20c32..8e7ddf1891818 100644
--- a/pandas/core/frame.py
+++ b/pandas/core/frame.py
@@ -4308,7 +4308,11 @@ def _reindex_multi(self, axes, copy: bool, fill_value) -> DataFrame:
 
         if row_indexer is not None and col_indexer is not None:
             indexer = row_indexer, col_indexer
-            new_values = take_2d_multi(self.values, indexer, fill_value=fill_value)
+            # error: Argument 2 to "take_2d_multi" has incompatible type "Tuple[Any,
+            # Any]"; expected "ndarray"
+            new_values = take_2d_multi(
+                self.values, indexer, fill_value=fill_value  # type: ignore[arg-type]
+            )
             return self._constructor(new_values, index=new_index, columns=new_columns)
         else:
             return self._reindex_with_indexers(

From 466a8b09b5595f3a85b29cabdc879b55b8fca484 Mon Sep 17 00:00:00 2001
From: Simon Hawkins
Date: Wed, 24 Feb 2021 11:52:52 +0000
Subject: [PATCH 69/86] update comments

---
 pandas/core/arrays/_mixins.py         |  6 ++++-
 pandas/core/arrays/base.py            |  6 ++++-
 pandas/core/arrays/categorical.py     |  6 ++++-
 pandas/core/arrays/string_arrow.py    |  8 ++++++-
 pandas/core/construction.py           | 28 +++++++++++++++++-----
 pandas/core/dtypes/cast.py            | 34 +++++++++++++++++++++++----
 pandas/core/internals/blocks.py       |  4 +++-
 pandas/core/internals/construction.py |  6 ++++-
 8 files changed, 82 insertions(+), 16 deletions(-)

diff --git a/pandas/core/arrays/_mixins.py b/pandas/core/arrays/_mixins.py
index c663ccecb1721..0134ae5fa7b3d 100644
--- a/pandas/core/arrays/_mixins.py
+++ b/pandas/core/arrays/_mixins.py
@@ -285,7 +285,11 @@ def fillna(
         value, method = validate_fillna_kwargs(value, method)
 
         mask = self.isna()
-        value = missing.check_value_size(value, mask, len(self))
+        # error: Argument 2 to "check_value_size" has incompatible type
+        # "ExtensionArray"; expected "ndarray"
+        value = missing.check_value_size(
+            value, mask, len(self)  # type: ignore[arg-type]
+        )
 
         # error: "ExtensionArray" has no attribute "any"
         if mask.any():  # type: ignore[attr-defined]
diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py
index 5949a3870ab76..73070f5548f76 100644
--- a/pandas/core/arrays/base.py
+++ b/pandas/core/arrays/base.py
@@ -708,7 +708,11 @@ def fillna(self, value=None, method=None, limit=None):
         value, method = validate_fillna_kwargs(value, method)
 
         mask = self.isna()
-        value = missing.check_value_size(value, mask, len(self))
+        # error: Argument 2 to "check_value_size" has incompatible type
+        # "ExtensionArray"; expected "ndarray"
+        value = missing.check_value_size(
+            value, mask, len(self)  # type: ignore[arg-type]
+        )
 
         # error: "ExtensionArray" has no attribute "any"
         if mask.any():  # type: ignore[attr-defined]
diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py
index cd13ed143632c..09365554a0008 100644
--- a/pandas/core/arrays/categorical.py
+++ b/pandas/core/arrays/categorical.py
@@ -534,7 +534,11 @@ def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike:
                 msg = f"Cannot cast {self.categories.dtype} dtype to {dtype}"
                 raise ValueError(msg)
 
-            result = take_nd(new_cats, libalgos.ensure_platform_int(self._codes))
+            # error: Incompatible types in assignment (expression has type "ndarray",
+            # variable has type "Categorical")
+            result = take_nd(  # type: ignore[assignment]
+                new_cats, libalgos.ensure_platform_int(self._codes)
+            )
 
         # error: Incompatible return value type (got "Categorical", expected "ndarray")
         return result  # type: ignore[return-value]
diff --git a/pandas/core/arrays/string_arrow.py b/pandas/core/arrays/string_arrow.py
index 133d21e42cf2a..290b5bf2a92d4 100644
--- a/pandas/core/arrays/string_arrow.py
+++ b/pandas/core/arrays/string_arrow.py
@@ -390,7 +390,13 @@ def fillna(self, value=None, method=None, limit=None):
         if mask.any():
             if method is not None:
                 func = missing.get_fill_func(method)
-                new_values = func(self.to_numpy(object), limit=limit, mask=mask)
+                # error: Argument 1 to "to_numpy" of "ArrowStringArray" has incompatible
+                # type "Type[object]"; expected "Union[str, dtype[Any], None]"
+                new_values = func(
+                    self.to_numpy(object),  # type: ignore[arg-type]
+                    limit=limit,
+                    mask=mask,
+                )
                 new_values = self._from_sequence(new_values)
             else:
                 # fill with value
diff --git a/pandas/core/construction.py b/pandas/core/construction.py
index 4eb3bbda1c473..967ed1b38bdb3 100644
--- a/pandas/core/construction.py
+++ b/pandas/core/construction.py
@@ -492,7 +492,9 @@ def sanitize_array(
             try:
                 subarr = _try_cast(data, dtype, copy, True)
             except ValueError:
-                subarr = np.array(data, copy=copy)
+                # error: Incompatible types in assignment (expression has type
+                # "ndarray", variable has type "ExtensionArray")
+                subarr = np.array(data, copy=copy)  # type: ignore[assignment]
         else:
             # we will try to copy by-definition here
             subarr = _try_cast(data, dtype, copy, raise_cast_failure)
@@ -505,7 +507,9 @@ def sanitize_array(
             subarr = subarr.astype(dtype, copy=copy)
         elif copy:
             subarr = subarr.copy()
-        return subarr
+        # error: Incompatible return value type (got "ExtensionArray", expected
+        # "ndarray")
+        return subarr  # type: ignore[return-value]
 
     elif isinstance(data, (list, tuple, abc.Set, abc.ValuesView)) and len(data) > 0:
         # TODO: deque, array.array
@@ -518,7 +522,10 @@ def sanitize_array(
             subarr = _try_cast(data, dtype, copy, raise_cast_failure)
         else:
             subarr = maybe_convert_platform(data)
+            # error: Incompatible types in assignment (expression has type
+            # "Union[ExtensionArray, ndarray, List[Any]]", variable has type
+            # "ExtensionArray")
             subarr = maybe_cast_to_datetime(subarr, dtype)  # type: ignore[assignment]
 
     elif isinstance(data, range):
         # GH#16804
@@ -539,7 +546,13 @@ def sanitize_array(
     subarr = _sanitize_ndim(subarr, data, dtype, index)
 
     if not (is_extension_array_dtype(subarr.dtype) or
            is_extension_array_dtype(dtype)):
-        subarr = _sanitize_str_dtypes(subarr, data, dtype, copy)
+        # error: Incompatible types in assignment (expression has type "ndarray",
+        # variable has type "ExtensionArray")
+        # error: Argument 1 to "_sanitize_str_dtypes" has incompatible type
+        # "ExtensionArray"; expected "ndarray"
+        subarr = _sanitize_str_dtypes(  # type: ignore[assignment]
+            subarr, data, dtype, copy  # type: ignore[arg-type]
+        )
 
     is_object_or_str_dtype = is_object_dtype(dtype) or is_string_dtype(dtype)
     if is_object_dtype(subarr.dtype) and not is_object_or_str_dtype:
@@ -547,7 +560,8 @@ def sanitize_array(
 
         if inferred in {"interval", "period"}:
             subarr = array(subarr)
 
-    return subarr
+    # error: Incompatible return value type (got "ExtensionArray", expected "ndarray")
+    return subarr  # type: ignore[return-value]
 
 
 def _sanitize_ndim(
@@ -656,7 +670,9 @@ def _try_cast(
         and not copy
         and dtype is None
     ):
-        return arr
+        # error: Incompatible return value type (got "ndarray", expected
+        # "ExtensionArray")
+        return arr  # type: ignore[return-value]
 
     if isinstance(dtype, ExtensionDtype) and (dtype.kind != "M" or is_sparse(dtype)):
         # create an extension array from its dtype
diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py
index f227ca0418307..8106cd96c38cc 100644
--- a/pandas/core/dtypes/cast.py
+++ b/pandas/core/dtypes/cast.py
@@ -1660,7 +1660,11 @@ def maybe_cast_to_datetime(
             try:
                 if is_datetime64:
-                    dti = to_datetime(value, errors="raise")
+                    # error: No overload variant of "to_datetime" matches
+                    # argument types "ndarray", "str"
+                    dti = to_datetime(  # type: ignore[call-overload]
+                        value, errors="raise"
+                    )
                     # GH 25843: Remove tz information since the dtype
                     # didn't specify one
                     if dti.tz is not None:
@@ -1672,16 +1676,38 @@ def maybe_cast_to_datetime(
                     # datetime64tz is assumed to be naive which should
                     # be localized to the timezone.
                     is_dt_string = is_string_dtype(value.dtype)
-                    dta = to_datetime(value, errors="raise").array
+                    # error: No overload variant of "to_datetime" matches
+                    # argument types "ndarray", "str"
+                    dta = to_datetime(  # type: ignore[call-overload]
+                        value, errors="raise"
+                    ).array
                     if dta.tz is not None:
                         value = dta.astype(dtype, copy=False)
                     elif is_dt_string:
                         # Strings here are naive, so directly localize
-                        value = dta.tz_localize(dtype.tz)
+
+                        # error: Item "dtype[Any]" of "Union[dtype[Any],
+                        # ExtensionDtype, None]" has no attribute "tz"
+                        # error: Item "ExtensionDtype" of "Union[dtype[Any],
+                        # ExtensionDtype, None]" has no attribute "tz"
+                        # error: Item "None" of "Union[dtype[Any],
+                        # ExtensionDtype, None]" has no attribute "tz"
+                        value = dta.tz_localize(
+                            dtype.tz  # type: ignore[union-attr]
+                        )
                     else:
                         # Numeric values are UTC at this point,
                         # so localize and convert
-                        value = dta.tz_localize("UTC").tz_convert(dtype.tz)
+
+                        # error: Item "dtype[Any]" of "Union[dtype[Any],
+                        # ExtensionDtype, None]" has no attribute "tz"
+                        # error: Item "ExtensionDtype" of "Union[dtype[Any],
+                        # ExtensionDtype, None]" has no attribute "tz"
+                        # error: Item "None" of "Union[dtype[Any],
+                        # ExtensionDtype, None]" has no attribute "tz"
+                        value = dta.tz_localize("UTC").tz_convert(
+                            dtype.tz  # type: ignore[union-attr]
+                        )
             elif is_timedelta64:
                 value = to_timedelta(value, errors="raise")._values
         except OutOfBoundsDatetime:
diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py
index c9f28a63983f1..ebd266413fd2f 100644
--- a/pandas/core/internals/blocks.py
+++ b/pandas/core/internals/blocks.py
@@ -1403,7 +1403,9 @@ def take_nd(
         else:
             allow_fill = True
 
-        new_values = algos.take_nd(
+        # error: Value of type variable "ArrayLike" of "take_nd" cannot be
+        # "Union[ndarray, ExtensionArray]"
+        new_values = algos.take_nd(  # type: ignore[type-var]
             values, indexer, axis=axis, allow_fill=allow_fill, fill_value=fill_value
         )
 
diff --git a/pandas/core/internals/construction.py b/pandas/core/internals/construction.py
index e0859206e7f3a..5c4ba298d2629 100644
--- a/pandas/core/internals/construction.py
+++ b/pandas/core/internals/construction.py
@@ -684,7 +684,11 @@ def _list_of_series_to_arrays(
             values = extract_array(s, extract_numpy=True)
         aligned_values.append(algorithms.take_nd(values, indexer))
 
-    content = np.vstack(aligned_values)
+    # error: Argument 1 to "vstack" has incompatible type "List[ExtensionArray]";
+    # expected "Sequence[Union[Union[int, float, complex, str, bytes, generic],
+    # Sequence[Union[int, float, complex, str, bytes, generic]],
+    # Sequence[Sequence[Any]], _SupportsArray]]"
+    content = np.vstack(aligned_values)  # type: ignore[arg-type]
 
     # error: Incompatible return value type (got "Tuple[ndarray, Union[Index,
    # List[Any]]]", expected "Tuple[List[Union[Union[str, int, float, bool], Union[Any,

From 1bb6eff9b2827dcd77bfa4a14d2e43c3bd314ef4 Mon Sep 17 00:00:00 2001
From: Simon Hawkins
Date: Fri, 26 Feb 2021 11:52:35 +0000
Subject: [PATCH 70/86] update comments

---
 pandas/core/arrays/categorical.py      |  6 +++++-
 pandas/core/arrays/datetimelike.py     | 13 +++++++++++--
 pandas/core/dtypes/missing.py          |  3 ++-
 pandas/core/groupby/generic.py         |  4 +++-
 pandas/core/internals/array_manager.py | 14 ++++++++++----
 pandas/core/internals/managers.py      |  6 +++++-
 pandas/core/reshape/merge.py           |  9 +++++++--
 pandas/core/series.py                  |  7 ++++++-
 8 files changed, 49 insertions(+), 13 deletions(-)

diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py
index 7dda22fa4f1e3..dd6c145578ca7 100644
---
a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -2142,7 +2142,11 @@ def mode(self, dropna=True): if dropna: good = self._codes != -1 codes = self._codes[good] - codes = sorted(htable.mode_int64(ensure_int64(codes), dropna)) + # error: Incompatible types in assignment (expression has type "List[Any]", + # variable has type "ndarray") + codes = sorted( # type: ignore[assignment] + htable.mode_int64(ensure_int64(codes), dropna) + ) return self._from_backing_data(codes) # ------------------------------------------------------------------ diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py index 4e5266276aa7b..2801677cf27e3 100644 --- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -467,7 +467,10 @@ def view(self, dtype: Optional[Dtype] = None) -> ArrayLike: if isinstance(dtype, type): # we sometimes pass non-dtype objects, e.g np.ndarray; # pass those through to the underlying ndarray - return self._ndarray.view(dtype) + + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") + return self._ndarray.view(dtype) # type: ignore[return-value] dtype = pandas_dtype(dtype) if isinstance(dtype, (PeriodDtype, DatetimeTZDtype)): @@ -491,7 +494,13 @@ def view(self, dtype: Optional[Dtype] = None) -> ArrayLike: return TimedeltaArray._simple_new( # type: ignore[return-value] self.asi8.view("m8[ns]"), dtype=dtype ) - return self._ndarray.view(dtype=dtype) + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") + # error: Argument "dtype" to "view" of "_ArrayOrScalarCommon" has incompatible + # type "Union[ExtensionDtype, dtype[Any]]"; expected "Union[dtype[Any], None, + # type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" + return self._ndarray.view(dtype=dtype) # type: ignore[return-value,arg-type] # ------------------------------------------------------------------ # ExtensionArray Interface diff --git a/pandas/core/dtypes/missing.py b/pandas/core/dtypes/missing.py index 8a2031dfd0c60..f5a16846a4460 100644 --- a/pandas/core/dtypes/missing.py +++ b/pandas/core/dtypes/missing.py @@ -625,7 +625,8 @@ def is_valid_na_for_dtype(obj, dtype: DtypeObj) -> bool: # Numeric return obj is not NaT and not isinstance(obj, (np.datetime64, np.timedelta64)) - elif dtype == np.dtype(object): + # error: Value of type variable "_DTypeScalar" of "dtype" cannot be "object" + elif dtype == np.dtype(object): # type: ignore[type-var] # This is needed for Categorical, but is kind of weird return True diff --git a/pandas/core/groupby/generic.py b/pandas/core/groupby/generic.py index 5edf5bfbf8d12..ca8c8aaab8c8d 100644 --- a/pandas/core/groupby/generic.py +++ b/pandas/core/groupby/generic.py @@ -1209,7 +1209,9 @@ def array_func(values: ArrayLike) -> ArrayLike: assert how == "ohlc" raise - result = py_fallback(values) + # error: Incompatible types in assignment (expression has type + # "ExtensionArray", variable has type "ndarray") + result = py_fallback(values) # type: ignore[assignment] return cast_agg_result(result, values, how) diff --git a/pandas/core/internals/array_manager.py b/pandas/core/internals/array_manager.py index 835748ebb0441..0231251675013 100644 --- a/pandas/core/internals/array_manager.py +++ b/pandas/core/internals/array_manager.py @@ -242,7 +242,11 @@ def reduce( if res is NaT and is_timedelta64_ns_dtype(arr.dtype): result_arrays.append(np.array(["NaT"], 
dtype="timedelta64[ns]")) else: - result_arrays.append(sanitize_array([res], None)) + # error: Argument 1 to "append" of "list" has incompatible type + # "ExtensionArray"; expected "ndarray" + result_arrays.append( + sanitize_array([res], None) # type: ignore[arg-type] + ) result_indices.append(i) index = Index._simple_new(np.array([None], dtype=object)) # placeholder @@ -253,7 +257,9 @@ def reduce( indexer = np.arange(self.shape[0]) columns = self.items - new_mgr = type(self)(result_arrays, [index, columns]) + # error: Argument 1 to "ArrayManager" has incompatible type "List[ndarray]"; + # expected "List[Union[ndarray, ExtensionArray]]" + new_mgr = type(self)(result_arrays, [index, columns]) # type: ignore[arg-type] return new_mgr, indexer def grouped_reduce(self: T, func: Callable, ignore_failures: bool = False) -> T: @@ -398,8 +404,8 @@ def apply_with_block(self: T, f, align_keys=None, **kwargs) -> T: # DatetimeArray needs to be converted to ndarray for DatetimeBlock arr = arr._data # type: ignore[union-attr] elif arr.dtype.kind == "m" and not isinstance(arr, np.ndarray): - # TimedeltaArray needs to be converted to ndarray for TimedeltaBlock - arr = arr._data # type: ignore[union-attr] + # error: "ExtensionArray" has no attribute "_data" [attr-defined] + arr = arr._data # type: ignore[attr-defined] if isinstance(arr, np.ndarray): arr = np.atleast_2d(arr) block = make_block(arr, placement=slice(0, 1, 1), ndim=2) diff --git a/pandas/core/internals/managers.py b/pandas/core/internals/managers.py index 15a2e399f722d..bd77bb4ace510 100644 --- a/pandas/core/internals/managers.py +++ b/pandas/core/internals/managers.py @@ -297,7 +297,11 @@ def arrays(self) -> List[ArrayLike]: Not to be used in actual code, and return value is not the same as the ArrayManager method (list of 1D arrays vs iterator of 2D ndarrays / 1D EAs). 
""" - return [blk.values for blk in self.blocks] + # error: List comprehension has incompatible type List[Union[ndarray, + # ExtensionArray]]; expected List[ExtensionArray] + # error: List comprehension has incompatible type List[Union[ndarray, + # ExtensionArray]]; expected List[ndarray] + return [blk.values for blk in self.blocks] # type: ignore[misc] def __getstate__(self): block_values = [b.values for b in self.blocks] diff --git a/pandas/core/reshape/merge.py b/pandas/core/reshape/merge.py index e46f2a67205ee..a048217d6b1f0 100644 --- a/pandas/core/reshape/merge.py +++ b/pandas/core/reshape/merge.py @@ -2064,8 +2064,13 @@ def _factorize_keys( if is_datetime64tz_dtype(lk.dtype) and is_datetime64tz_dtype(rk.dtype): # Extract the ndarray (UTC-localized) values # Note: we dont need the dtypes to match, as these can still be compared - lk = cast("DatetimeArray", lk)._ndarray - rk = cast("DatetimeArray", rk)._ndarray + + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") + lk = cast("DatetimeArray", lk)._ndarray # type: ignore[assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") + rk = cast("DatetimeArray", rk)._ndarray # type: ignore[assignment] elif ( is_categorical_dtype(lk.dtype) diff --git a/pandas/core/series.py b/pandas/core/series.py index 40d9534132d66..4b35cd71c30a5 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -397,7 +397,12 @@ def __init__( elif copy: data = data.copy() else: - data = sanitize_array(data, index, dtype, copy) + # error: Argument 3 to "sanitize_array" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + data = sanitize_array( + data, index, dtype, copy # type: ignore[arg-type] + ) data = SingleBlockManager.from_array(data, index) From d9d079cd7881cdd6091666dfbf5c06783c40cd1b Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Fri, 26 Feb 2021 15:06:41 +0000 Subject: [PATCH 71/86] update comments --- pandas/core/frame.py | 73 +++++++++++++++++++++++++++++++++++++++----- 1 file changed, 65 insertions(+), 8 deletions(-) diff --git a/pandas/core/frame.py b/pandas/core/frame.py index 5ecd8c62b35f3..ef8ce6f40edf4 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -575,7 +575,12 @@ def __init__( ) elif isinstance(data, dict): - mgr = dict_to_mgr(data, index, columns, dtype=dtype) + # error: Argument "dtype" to "dict_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = dict_to_mgr( + data, index, columns, dtype=dtype # type: ignore[arg-type] + ) elif isinstance(data, ma.MaskedArray): import numpy.ma.mrecords as mrecords @@ -592,7 +597,16 @@ def __init__( # a masked array else: data = sanitize_masked_array(data) - mgr = ndarray_to_mgr(data, index, columns, dtype=dtype, copy=copy) + # error: Argument "dtype" to "ndarray_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = ndarray_to_mgr( + data, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + copy=copy, + ) elif isinstance(data, (np.ndarray, Series, Index)): if data.dtype.names: @@ -600,11 +614,35 @@ def __init__( data = {k: data[k] for k in data_columns} if columns is None: columns = data_columns - mgr = dict_to_mgr(data, index, columns, 
dtype=dtype) + # error: Argument "dtype" to "dict_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = dict_to_mgr( + data, index, columns, dtype=dtype # type: ignore[arg-type] + ) elif getattr(data, "name", None) is not None: - mgr = dict_to_mgr({data.name: data}, index, columns, dtype=dtype) + # error: Item "ndarray" of "Union[ndarray, Series, Index]" has no + # attribute "name" + # error: Argument "dtype" to "dict_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = dict_to_mgr( + {data.name: data}, # type: ignore[union-attr] + index, + columns, + dtype=dtype, # type: ignore[arg-type] + ) else: - mgr = ndarray_to_mgr(data, index, columns, dtype=dtype, copy=copy) + # error: Argument "dtype" to "ndarray_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = ndarray_to_mgr( + data, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + copy=copy, + ) # For data is list-like, or Iterable (will consume into list) elif is_list_like(data): @@ -638,9 +676,23 @@ def __init__( dtype=dtype, # type: ignore[arg-type] ) else: - mgr = ndarray_to_mgr(data, index, columns, dtype=dtype, copy=copy) + # error: Argument "dtype" to "ndarray_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; + # expected "Union[dtype[Any], ExtensionDtype, None]" + mgr = ndarray_to_mgr( + data, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + copy=copy, + ) else: - mgr = dict_to_mgr({}, index, columns, dtype=dtype) + # error: Argument "dtype" to "dict_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = dict_to_mgr( + {}, index, columns, dtype=dtype # type: ignore[arg-type] + ) # For data is scalar else: if index is None or columns is None: @@ -678,7 +730,12 @@ def __init__( ) mgr = ndarray_to_mgr( - values, index, columns, dtype=values.dtype, copy=False + # error: "List[ExtensionArray]" has no attribute "dtype" + values, + index, + columns, + dtype=values.dtype, # type: ignore[attr-defined] + copy=False, ) # ensure correct Manager type according to settings From c3f84e33c5cbd835404159edde38a03b2ab14d7a Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Fri, 26 Feb 2021 15:23:41 +0000 Subject: [PATCH 72/86] update comments --- pandas/core/algorithms.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index ff950cbc8759d..40526ca738ac1 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -203,8 +203,13 @@ def _ensure_data(values: ArrayLike) -> Tuple[np.ndarray, DtypeObj]: return values.asi8, dtype # type: ignore[union-attr] elif is_categorical_dtype(values.dtype): - values = cast("Categorical", values) - values = values.codes + # error: Incompatible types in assignment (expression has type "Categorical", + # variable has type "ndarray") + values = cast("Categorical", values) # type: ignore[assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") + # error: Item "ndarray" of "Union[Any, ndarray]" has no attribute "codes" + values = values.codes # type: ignore[assignment,union-attr] 
dtype = pandas_dtype("category") # we are actually coercing to int64 From be6f667f65e35675a19e2eb76d7176aafd43cd8c Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 27 Feb 2021 10:38:49 +0000 Subject: [PATCH 73/86] update comments --- pandas/core/dtypes/cast.py | 28 ++++++++++++++++++++++------ pandas/core/internals/blocks.py | 8 ++++++-- 2 files changed, 28 insertions(+), 8 deletions(-) diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index 20c7587215bf8..028bba074c126 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -1550,16 +1550,22 @@ def try_timedelta(v: np.ndarray) -> np.ndarray: inferred_type = lib.infer_datetimelike_array(ensure_object(v)) if inferred_type == "date" and convert_dates: - value = try_datetime(v) + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "Union[ndarray, List[Any]]") + value = try_datetime(v) # type: ignore[assignment] elif inferred_type == "datetime": - value = try_datetime(v) + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "Union[ndarray, List[Any]]") + value = try_datetime(v) # type: ignore[assignment] elif inferred_type == "timedelta": value = try_timedelta(v) elif inferred_type == "nat": # if all NaT, return as datetime if isna(v).all(): - value = try_datetime(v) + # error: Incompatible types in assignment (expression has type + # "ExtensionArray", variable has type "Union[ndarray, List[Any]]") + value = try_datetime(v) # type: ignore[assignment] else: # We have at least a NaT and a string @@ -1569,7 +1575,10 @@ def try_timedelta(v: np.ndarray) -> np.ndarray: if lib.infer_dtype(value, skipna=False) in ["mixed"]: # cannot skip missing values, as NaT implies that the string # is actually a datetime - value = try_datetime(v) + + # error: Incompatible types in assignment (expression has type + # "ExtensionArray", variable has type "Union[ndarray, List[Any]]") + value = try_datetime(v) # type: ignore[assignment] return value @@ -1737,7 +1746,10 @@ def maybe_cast_to_datetime( # only do this if we have an array and the dtype of the array is not # setup already we are not an integer/object, so don't bother with this # conversion - value = maybe_infer_to_datetimelike(value) + + # error: Argument 1 to "maybe_infer_to_datetimelike" has incompatible type + # "Union[ExtensionArray, List[Any]]"; expected "Union[ndarray, List[Any]]" + value = maybe_infer_to_datetimelike(value) # type: ignore[arg-type] return value @@ -1965,7 +1977,11 @@ def construct_1d_ndarray_preserving_na( # TODO(numpy#12550): special-case can be removed subarr = construct_1d_object_array_from_listlike(list(values)) else: - subarr = np.array(values, dtype=dtype, copy=copy) + # error: Argument "dtype" to "array" has incompatible type + # "Union[dtype[Any], ExtensionDtype, None]"; expected "Union[dtype[Any], + # None, type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, + # Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" + subarr = np.array(values, dtype=dtype, copy=copy) # type: ignore[arg-type] return subarr diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index ae56f148025c6..86f3e965f5f75 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -618,7 +618,9 @@ def downcast(self, dtypes=None) -> List[Block]: if dtypes is None: dtypes = "infer" - nv = maybe_downcast_to_dtype(values, dtypes) + # error: Value of type variable "ArrayLike" of 
"maybe_downcast_to_dtype" + # cannot be "Union[ndarray, ExtensionArray]" + nv = maybe_downcast_to_dtype(values, dtypes) # type: ignore[type-var] return [self.make_block(nv)] # ndim > 1 @@ -861,7 +863,9 @@ def replace( ) blk = self if inplace else self.copy() - putmask_inplace(blk.values, mask, value) + # error: Value of type variable "ArrayLike" of "putmask_inplace" cannot be + # "Union[ndarray, ExtensionArray]" + putmask_inplace(blk.values, mask, value) # type: ignore[type-var] blocks = blk.convert(numeric=False, copy=False) return blocks From 763f0e9d1d28027b5d7d6b78035a87964d866633 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sun, 28 Feb 2021 10:00:28 +0000 Subject: [PATCH 74/86] update comments --- pandas/core/arrays/categorical.py | 7 ++++++- pandas/core/dtypes/cast.py | 4 +++- pandas/core/indexes/base.py | 6 +++++- pandas/core/internals/array_manager.py | 4 +++- pandas/core/internals/construction.py | 14 +++++--------- pandas/core/sorting.py | 9 +++++++-- 6 files changed, 29 insertions(+), 15 deletions(-) diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py index 203595f474861..50a41aabc585b 100644 --- a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -412,7 +412,12 @@ def __init__( if null_mask.any(): # We remove null values here, then below will re-insert # them, grep "full_codes" - arr = [values[idx] for idx in np.where(~null_mask)[0]] + + # error: Incompatible types in assignment (expression has type + # "List[Any]", variable has type "ExtensionArray") + arr = [ # type: ignore[assignment] + values[idx] for idx in np.where(~null_mask)[0] + ] arr = sanitize_array(arr, None) values = arr diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index 2c4cf0cf33640..479b711348b16 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -1541,7 +1541,9 @@ def try_timedelta(v: np.ndarray) -> np.ndarray: inferred_type = lib.infer_datetimelike_array(ensure_object(v)) if inferred_type == "datetime": - value = try_datetime(v) + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "Union[ndarray, List[Any]]") + value = try_datetime(v) # type: ignore[assignment] elif inferred_type == "timedelta": value = try_timedelta(v) elif inferred_type == "nat": diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index 2d05e630f4e78..7612a3cd7cb12 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -2990,7 +2990,11 @@ def _union(self, other: Index, sort): missing = algos.unique1d(self.get_indexer_non_unique(other)[1]) if len(missing) > 0: - other_diff = algos.take_nd(rvals, missing, allow_fill=False) + # error: Value of type variable "ArrayLike" of "take_nd" cannot be + # "Union[ExtensionArray, ndarray]" + other_diff = algos.take_nd( # type: ignore[type-var] + rvals, missing, allow_fill=False + ) result = concat_compat((lvals, other_diff)) else: diff --git a/pandas/core/internals/array_manager.py b/pandas/core/internals/array_manager.py index 327a9420750b0..0e1115fdb3510 100644 --- a/pandas/core/internals/array_manager.py +++ b/pandas/core/internals/array_manager.py @@ -299,7 +299,9 @@ def grouped_reduce(self: T, func: Callable, ignore_failures: bool = False) -> T: else: columns = self.items - return type(self)(result_arrays, [index, columns]) + # error: Argument 1 to "ArrayManager" has incompatible type "List[ndarray]"; + # expected "List[Union[ndarray, ExtensionArray]]" + return type(self)(result_arrays, [index, 
columns]) # type: ignore[arg-type] def operate_blockwise(self, other: ArrayManager, array_op) -> ArrayManager: """ diff --git a/pandas/core/internals/construction.py b/pandas/core/internals/construction.py index 9bf0d42291862..7395631ef5077 100644 --- a/pandas/core/internals/construction.py +++ b/pandas/core/internals/construction.py @@ -619,7 +619,9 @@ def to_arrays(data, columns: Optional[Index], dtype: Optional[DtypeObj] = None): if not len(data): if isinstance(data, np.ndarray): - columns = data.dtype.names + # error: Incompatible types in assignment (expression has type + # "Optional[Tuple[str, ...]]", variable has type "Optional[Index]") + columns = data.dtype.names # type: ignore[assignment] if columns is not None: return [[]] * len(columns), columns return [], [] # columns if columns is not None else [] @@ -649,11 +651,8 @@ def to_arrays(data, columns: Optional[Index], dtype: Optional[DtypeObj] = None): # error: Incompatible types in assignment (expression has type "List[ndarray]", # variable has type "List[Union[Union[str, int, float, bool], Union[Any, Any, Any, # Any]]]") - # error: Argument 1 to "_finalize_columns_and_data" has incompatible type - # "List[Union[Union[str, int, float, bool], Union[Any, Any, Any, Any]]]"; expected - # "ndarray" content, columns = _finalize_columns_and_data( # type: ignore[assignment] - content, columns, dtype # type: ignore[arg-type] + content, columns, dtype ) return content, columns @@ -702,10 +701,7 @@ def _list_of_series_to_arrays( # Sequence[Sequence[Any]], _SupportsArray]]" content = np.vstack(aligned_values) # type: ignore[arg-type] - # error: Incompatible return value type (got "Tuple[ndarray, Union[Index, - # List[Any]]]", expected "Tuple[List[Union[Union[str, int, float, bool], Union[Any, - # Any, Any, Any]]], Union[Index, List[Union[str, int]]]]") - return content, columns # type: ignore[return-value] + return content, columns def _list_of_dict_to_arrays( diff --git a/pandas/core/sorting.py b/pandas/core/sorting.py index 97418e2ce0105..ba81866602361 100644 --- a/pandas/core/sorting.py +++ b/pandas/core/sorting.py @@ -585,11 +585,16 @@ def get_group_index_sorter( df.groupby(key)[col].transform('first') """ if ngroups is None: - ngroups = 1 + group_index.max() + # error: Incompatible types in assignment (expression has type "number[Any]", + # variable has type "Optional[int]") + ngroups = 1 + group_index.max() # type: ignore[assignment] count = len(group_index) alpha = 0.0 # taking complexities literally; there may be beta = 1.0 # some room for fine-tuning these parameters - do_groupsort = count > 0 and ((alpha + beta * ngroups) < (count * np.log(count))) + # error: Unsupported operand types for * ("float" and "None") + do_groupsort = count > 0 and ( + (alpha + beta * ngroups) < (count * np.log(count)) # type: ignore[operator] + ) if do_groupsort: sorter, _ = algos.groupsort_indexer(ensure_int64(group_index), ngroups) return ensure_platform_int(sorter) From fb495b6d525ae1cf2ef165ceeb9d585d8fd7ecaf Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Mon, 1 Mar 2021 22:09:09 +0000 Subject: [PATCH 75/86] update comments --- pandas/core/arrays/datetimelike.py | 20 ++++++++++++--- pandas/core/dtypes/cast.py | 14 ++++++++--- pandas/core/frame.py | 36 +++++++++++++++++++++------ pandas/core/internals/construction.py | 14 +++++++---- pandas/core/tools/timedeltas.py | 4 ++- 5 files changed, 68 insertions(+), 20 deletions(-) diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py index cbd4172b315c0..fd5d548e4005e 100644 
--- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -475,16 +475,28 @@ def view(self, dtype: Optional[Dtype] = None) -> ArrayLike: dtype = pandas_dtype(dtype) if isinstance(dtype, (PeriodDtype, DatetimeTZDtype)): cls = dtype.construct_array_type() - return cls(self.asi8, dtype=dtype) + # error: Incompatible return value type (got "Union[PeriodArray, + # DatetimeArray]", expected "ndarray") + return cls(self.asi8, dtype=dtype) # type: ignore[return-value] elif dtype == "M8[ns]": from pandas.core.arrays import DatetimeArray - return DatetimeArray(self.asi8, dtype=dtype) + # error: Incompatible return value type (got "DatetimeArray", expected + # "ndarray") + return DatetimeArray(self.asi8, dtype=dtype) # type: ignore[return-value] elif dtype == "m8[ns]": from pandas.core.arrays import TimedeltaArray - return TimedeltaArray(self.asi8, dtype=dtype) - return self._ndarray.view(dtype=dtype) + # error: Incompatible return value type (got "TimedeltaArray", expected + # "ndarray") + return TimedeltaArray(self.asi8, dtype=dtype) # type: ignore[return-value] + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") + # error: Argument "dtype" to "view" of "_ArrayOrScalarCommon" has incompatible + # type "Union[ExtensionDtype, dtype[Any]]"; expected "Union[dtype[Any], None, + # type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]" + return self._ndarray.view(dtype=dtype) # type: ignore[return-value,arg-type] # ------------------------------------------------------------------ # ExtensionArray Interface diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index f4dd1092fc8d6..1fca65c6bd641 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -1516,7 +1516,10 @@ def try_datetime(v: np.ndarray) -> ArrayLike: ) except (ValueError, TypeError): # e.g. 
is not convertible to datetime - return v.reshape(shape) + + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") + return v.reshape(shape) # type: ignore[return-value] else: # we might have a sequence of the same-datetimes with tz's # if so coerce to a DatetimeIndex; if they are not the same, @@ -1531,8 +1534,13 @@ def try_datetime(v: np.ndarray) -> ArrayLike: dta = DatetimeArray._simple_new(vals.view("M8[ns]"), dtype=tz_to_dtype(tz)) if dta.tz is None: # TODO(EA2D): conditional reshape kludge unnecessary with 2D EAs - return dta._ndarray.reshape(shape) - return dta + + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") + return dta._ndarray.reshape(shape) # type: ignore[return-value] + # error: Incompatible return value type (got "DatetimeArray", expected + # "ndarray") + return dta # type: ignore[return-value] def try_timedelta(v: np.ndarray) -> np.ndarray: # safe coerce to timedelta64 diff --git a/pandas/core/frame.py b/pandas/core/frame.py index db5fec58a2108..562d51bbce5b4 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -585,7 +585,12 @@ def __init__( # masked recarray if isinstance(data, mrecords.MaskedRecords): - mgr = rec_array_to_mgr(data, index, columns, dtype, copy) + # error: Argument 4 to "rec_array_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = rec_array_to_mgr( + data, index, columns, dtype, copy # type: ignore[arg-type] + ) # a masked array else: @@ -604,10 +609,27 @@ def __init__( elif isinstance(data, (np.ndarray, Series, Index)): if data.dtype.names: # i.e. numpy structured array - mgr = rec_array_to_mgr(data, index, columns, dtype, copy) + + # error: Argument 4 to "rec_array_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = rec_array_to_mgr( + data, index, columns, dtype, copy # type: ignore[arg-type] + ) elif getattr(data, "name", None) is not None: # i.e. 
Series/Index with non-None name - mgr = dict_to_mgr({data.name: data}, index, columns, dtype=dtype) + + # error: Item "ndarray" of "Union[ndarray, Series, Index]" has no + # attribute "name" + # error: Argument "dtype" to "dict_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = dict_to_mgr( + {data.name: data}, # type: ignore[union-attr] + index, + columns, + dtype=dtype, # type: ignore[arg-type] + ) else: # error: Argument "dtype" to "ndarray_to_mgr" has incompatible type # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected @@ -629,17 +651,17 @@ def __init__( data = dataclasses_to_dicts(data) if treat_as_nested(data): if columns is not None: - columns = ensure_index(columns) + # error: Value of type variable "AnyArrayLike" of "ensure_index" + # cannot be "Collection[Any]" + columns = ensure_index(columns) # type: ignore[type-var] arrays, columns, index = nested_data_to_arrays( - # error: Argument 2 to "nested_data_to_arrays" has incompatible - # type "Optional[Collection[Any]]"; expected "Optional[Index]" # error: Argument 3 to "nested_data_to_arrays" has incompatible # type "Optional[Collection[Any]]"; expected "Optional[Index]" # error: Argument 4 to "nested_data_to_arrays" has incompatible # type "Union[ExtensionDtype, str, dtype[Any], Type[object], # None]"; expected "Union[dtype[Any], ExtensionDtype, None]" data, - columns, # type: ignore[arg-type] + columns, index, # type: ignore[arg-type] dtype, # type: ignore[arg-type] ) diff --git a/pandas/core/internals/construction.py b/pandas/core/internals/construction.py index b260f7b5b4a76..0bc0ae6981636 100644 --- a/pandas/core/internals/construction.py +++ b/pandas/core/internals/construction.py @@ -143,7 +143,9 @@ def rec_array_to_mgr( if isinstance(data, np.ma.MaskedArray): new_arrays = fill_masked_arrays(data, arr_columns) else: - new_arrays = arrays + # error: Incompatible types in assignment (expression has type + # "List[ExtensionArray]", variable has type "List[ndarray]") + new_arrays = arrays # type: ignore[assignment] # create the manager arrays, arr_columns = reorder_arrays(new_arrays, arr_columns, columns) @@ -377,9 +379,7 @@ def nested_data_to_arrays( columns = ensure_index(data[0]._fields) arrays, columns = to_arrays(data, columns, dtype=dtype) - # error: Value of type variable "AnyArrayLike" of "ensure_index" cannot be - # "Optional[Index]" - columns = ensure_index(columns) # type: ignore[type-var] + columns = ensure_index(columns) if index is None: if isinstance(data[0], ABCSeries): @@ -674,7 +674,11 @@ def to_arrays( content, columns = _finalize_columns_and_data( # type: ignore[assignment] content, columns, dtype ) - return content, columns + # error: Incompatible return value type (got "Tuple[ndarray, Index]", expected + # "Tuple[List[ExtensionArray], Index]") + # error: Incompatible return value type (got "Tuple[ndarray, Index]", expected + # "Tuple[List[ndarray], Index]") + return content, columns # type: ignore[return-value] def _list_to_arrays(data: List[Union[Tuple, List]]) -> np.ndarray: diff --git a/pandas/core/tools/timedeltas.py b/pandas/core/tools/timedeltas.py index a335146265523..a8378e91f9375 100644 --- a/pandas/core/tools/timedeltas.py +++ b/pandas/core/tools/timedeltas.py @@ -181,5 +181,7 @@ def _convert_listlike(arg, unit=None, errors="raise", name=None): from pandas import TimedeltaIndex - value = TimedeltaIndex(value, unit="ns", name=name) + # error: Incompatible types in 
assignment (expression has type "TimedeltaIndex", + # variable has type "ndarray") + value = TimedeltaIndex(value, unit="ns", name=name) # type: ignore[assignment] return value From dd38e441b52ccd8ab9c49abf1079ee9a69816bc3 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Mon, 1 Mar 2021 22:23:13 +0000 Subject: [PATCH 76/86] update comments --- pandas/core/arrays/datetimelike.py | 8 +++++++- pandas/core/dtypes/concat.py | 6 +++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py index 415307a1b47ff..c2ac7517ecba3 100644 --- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -1233,7 +1233,13 @@ def _addsub_object_array(self, other: np.ndarray, op): res_values = op(self.astype("O"), np.asarray(other)) result = pd_array(res_values.ravel()) - result = extract_array(result, extract_numpy=True).reshape(self.shape) + # error: Item "ExtensionArray" of "Union[Any, ExtensionArray]" has no attribute + # "reshape" + result = extract_array( + result, extract_numpy=True + ).reshape( # type: ignore[union-attr] + self.shape + ) return result def _time_shift(self, periods, freq=None): diff --git a/pandas/core/dtypes/concat.py b/pandas/core/dtypes/concat.py index a9bea6da7ddc2..06fc1918b5ecf 100644 --- a/pandas/core/dtypes/concat.py +++ b/pandas/core/dtypes/concat.py @@ -71,7 +71,11 @@ def _cast_to_common_type(arr: ArrayLike, dtype: DtypeObj) -> ArrayLike: if is_extension_array_dtype(dtype) and isinstance(arr, np.ndarray): # numpy's astype cannot handle ExtensionDtypes return pd_array(arr, dtype=dtype, copy=False) - return arr.astype(dtype, copy=False) + # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has incompatible type + # "Union[dtype[Any], ExtensionDtype]"; expected "Union[dtype[Any], None, type, + # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], + # List[Any], _DTypeDict, Tuple[Any, Any]]]" + return arr.astype(dtype, copy=False) # type: ignore[arg-type] def concat_compat(to_concat, axis: int = 0, ea_compat_axis: bool = False): From 09f07f1246138ca87d4a3752266243ca4ac2f95b Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Wed, 3 Mar 2021 09:29:20 +0000 Subject: [PATCH 77/86] fix merge error --- pandas/core/frame.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/pandas/core/frame.py b/pandas/core/frame.py index 411a467cff4f0..a20f15168c4c7 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -636,16 +636,6 @@ def __init__( index, # type: ignore[arg-type] dtype, # type: ignore[arg-type] ) - # error: Argument "dtype" to "arrays_to_mgr" has incompatible type - # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; - # expected "Union[dtype[Any], ExtensionDtype, None]" - mgr = arrays_to_mgr( - arrays, - columns, - index, - columns, - dtype=dtype, # type: ignore[arg-type] - ) mgr = arrays_to_mgr( arrays, columns, index, columns, dtype=dtype, typ=manager ) From e938438a1a11e075866056b7ffd22f541a092a41 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Wed, 3 Mar 2021 10:00:43 +0000 Subject: [PATCH 78/86] update comments --- pandas/core/dtypes/cast.py | 76 +++++++++++++++++------- pandas/core/frame.py | 102 ++++++++++++++++++++++++++++---- pandas/core/internals/blocks.py | 6 +- pandas/core/tools/datetimes.py | 4 +- 4 files changed, 155 insertions(+), 33 deletions(-) diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index 89eabb963028d..550273f161fa1 100644 --- a/pandas/core/dtypes/cast.py +++ 
b/pandas/core/dtypes/cast.py @@ -1328,7 +1328,11 @@ def astype_array(values: ArrayLike, dtype: DtypeObj, copy: bool = False) -> Arra raise TypeError(msg) if is_datetime64tz_dtype(dtype) and is_datetime64_dtype(values.dtype): - return astype_dt64_to_dt64tz(values, dtype, copy, via_utc=True) + # error: Incompatible return value type (got "DatetimeArray", expected + # "ndarray") + return astype_dt64_to_dt64tz( # type: ignore[return-value] + values, dtype, copy, via_utc=True + ) if is_dtype_equal(values.dtype, dtype): if copy: @@ -1339,11 +1343,19 @@ def astype_array(values: ArrayLike, dtype: DtypeObj, copy: bool = False) -> Arra values = values.astype(dtype, copy=copy) else: - values = astype_nansafe(values, dtype, copy=copy) + # error: Incompatible types in assignment (expression has type "ExtensionArray", + # variable has type "ndarray") + # error: Argument 1 to "astype_nansafe" has incompatible type "ExtensionArray"; + # expected "ndarray" + values = astype_nansafe( # type: ignore[assignment] + values, dtype, copy=copy # type: ignore[arg-type] + ) # in pandas we don't store numpy str dtypes, so convert to object if isinstance(dtype, np.dtype) and issubclass(values.dtype.type, str): - values = np.array(values, dtype=object) + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") + values = np.array(values, dtype=object) # type: ignore[assignment] return values @@ -1770,8 +1782,13 @@ def maybe_cast_to_datetime( dta = sequence_to_datetimes(value, allow_object=False) # GH 25843: Remove tz information since the dtype # didn't specify one - if dta.tz is not None: - dta = dta.tz_localize(None) + + # error: Item "ndarray" of "Union[ndarray, DatetimeArray]" + # has no attribute "tz" + if dta.tz is not None: # type: ignore[union-attr] + # error: Item "ndarray" of "Union[ndarray, + # DatetimeArray]" has no attribute "tz_localize" + dta = dta.tz_localize(None) # type: ignore[union-attr] value = dta elif is_datetime64tz: # The string check can be removed once issue #13712 @@ -1780,31 +1797,48 @@ def maybe_cast_to_datetime( # be localized to the timezone. 
is_dt_string = is_string_dtype(value.dtype) dta = sequence_to_datetimes(value, allow_object=False) - if dta.tz is not None: - value = dta.astype(dtype, copy=False) + # error: Item "ndarray" of "Union[ndarray, DatetimeArray]" + # has no attribute "tz" + if dta.tz is not None: # type: ignore[union-attr] + # error: Argument 1 to "astype" of + # "_ArrayOrScalarCommon" has incompatible type + # "Union[dtype[Any], ExtensionDtype, None]"; expected + # "Union[dtype[Any], None, type, _SupportsDType, str, + # Union[Tuple[Any, int], Tuple[Any, Union[int, + # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, + # Any]]]" + value = dta.astype( + dtype, copy=False # type: ignore[arg-type] + ) elif is_dt_string: # Strings here are naive, so directly localize - # error: Item "dtype[Any]" of "Union[dtype[Any], - # ExtensionDtype, None]" has no attribute "tz" - # error: Item "ExtensionDtype" of "Union[dtype[Any], - # ExtensionDtype, None]" has no attribute "tz" - # error: Item "None" of "Union[dtype[Any], - # ExtensionDtype, None]" has no attribute "tz" - value = dta.tz_localize( + # error: Item "ndarray" of "Union[ndarray, + # DatetimeArray]" has no attribute "tz_localize" + value = dta.tz_localize( # type: ignore[union-attr] + # error: Item "dtype[Any]" of "Union[dtype[Any], + # ExtensionDtype, None]" has no attribute "tz" + # error: Item "ExtensionDtype" of "Union[dtype[Any], + # ExtensionDtype, None]" has no attribute "tz" + # error: Item "None" of "Union[dtype[Any], + # ExtensionDtype, None]" has no attribute "tz" dtype.tz # type: ignore[union-attr] ) else: # Numeric values are UTC at this point, # so localize and convert - # error: Item "dtype[Any]" of "Union[dtype[Any], - # ExtensionDtype, None]" has no attribute "tz" - # error: Item "ExtensionDtype" of "Union[dtype[Any], - # ExtensionDtype, None]" has no attribute "tz" - # error: Item "None" of "Union[dtype[Any], - # ExtensionDtype, None]" has no attribute "tz" - value = dta.tz_localize("UTC").tz_convert( + # error: Item "ndarray" of "Union[ndarray, + # DatetimeArray]" has no attribute "tz_localize" + value = dta.tz_localize( # type: ignore[union-attr] + "UTC" + ).tz_convert( + # error: Item "dtype[Any]" of "Union[dtype[Any], + # ExtensionDtype, None]" has no attribute "tz" + # error: Item "ExtensionDtype" of "Union[dtype[Any], + # ExtensionDtype, None]" has no attribute "tz" + # error: Item "None" of "Union[dtype[Any], + # ExtensionDtype, None]" has no attribute "tz" dtype.tz # type: ignore[union-attr] ) elif is_timedelta64: diff --git a/pandas/core/frame.py b/pandas/core/frame.py index a20f15168c4c7..98e2973c6db63 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -584,33 +584,84 @@ def __init__( ) elif isinstance(data, dict): - mgr = dict_to_mgr(data, index, columns, dtype=dtype, typ=manager) + # error: Argument "dtype" to "dict_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = dict_to_mgr( + data, index, columns, dtype=dtype, typ=manager # type: ignore[arg-type] + ) elif isinstance(data, ma.MaskedArray): import numpy.ma.mrecords as mrecords # masked recarray if isinstance(data, mrecords.MaskedRecords): - mgr = rec_array_to_mgr(data, index, columns, dtype, copy, typ=manager) + # error: Argument 4 to "rec_array_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = rec_array_to_mgr( + data, + index, + columns, + dtype, # 
type: ignore[arg-type] + copy, + typ=manager, + ) # a masked array else: data = sanitize_masked_array(data) mgr = ndarray_to_mgr( - data, index, columns, dtype=dtype, copy=copy, typ=manager + # error: Argument "dtype" to "ndarray_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; + # expected "Union[dtype[Any], ExtensionDtype, None]" + data, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + copy=copy, + typ=manager, ) elif isinstance(data, (np.ndarray, Series, Index)): if data.dtype.names: # i.e. numpy structured array - mgr = rec_array_to_mgr(data, index, columns, dtype, copy, typ=manager) + + # error: Argument 4 to "rec_array_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = rec_array_to_mgr( + data, + index, + columns, + dtype, # type: ignore[arg-type] + copy, + typ=manager, + ) elif getattr(data, "name", None) is not None: # i.e. Series/Index with non-None name mgr = dict_to_mgr( - {data.name: data}, index, columns, dtype=dtype, typ=manager + # error: Item "ndarray" of "Union[ndarray, Series, Index]" has no + # attribute "name" + # error: Argument "dtype" to "dict_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; + # expected "Union[dtype[Any], ExtensionDtype, None]" + {data.name: data}, # type: ignore[union-attr] + index, + columns, + dtype=dtype, # type: ignore[arg-type] + typ=manager, ) else: mgr = ndarray_to_mgr( - data, index, columns, dtype=dtype, copy=copy, typ=manager + # error: Argument "dtype" to "ndarray_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; + # expected "Union[dtype[Any], ExtensionDtype, None]" + data, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + copy=copy, + typ=manager, ) # For data is list-like, or Iterable (will consume into list) @@ -637,14 +688,39 @@ def __init__( dtype, # type: ignore[arg-type] ) mgr = arrays_to_mgr( - arrays, columns, index, columns, dtype=dtype, typ=manager + # error: Argument "dtype" to "arrays_to_mgr" has incompatible + # type "Union[ExtensionDtype, str, dtype[Any], Type[object], + # None]"; expected "Union[dtype[Any], ExtensionDtype, None]" + arrays, + columns, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + typ=manager, ) else: mgr = ndarray_to_mgr( - data, index, columns, dtype=dtype, copy=copy, typ=manager + # error: Argument "dtype" to "ndarray_to_mgr" has incompatible + # type "Union[ExtensionDtype, str, dtype[Any], Type[object], + # None]"; expected "Union[dtype[Any], ExtensionDtype, None]" + data, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + copy=copy, + typ=manager, ) else: - mgr = dict_to_mgr({}, index, columns, dtype=dtype, typ=manager) + # error: Argument "dtype" to "dict_to_mgr" has incompatible type + # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected + # "Union[dtype[Any], ExtensionDtype, None]" + mgr = dict_to_mgr( + {}, + index, + columns, + dtype=dtype, # type: ignore[arg-type] + typ=manager, + ) # For data is scalar else: if index is None or columns is None: @@ -684,7 +760,13 @@ def __init__( ) mgr = ndarray_to_mgr( - values, index, columns, dtype=values.dtype, copy=False, typ=manager + # error: "List[ExtensionArray]" has no attribute "dtype" + values, + index, + columns, + dtype=values.dtype, # type: ignore[attr-defined] + copy=False, + typ=manager, ) # ensure correct Manager type according to 
settings diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 38377ff4769bb..b1a94165531a9 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -661,7 +661,11 @@ def astype(self, dtype, copy: bool = False, errors: str = "raise"): if values.dtype.kind in ["m", "M"]: values = self.array_values() - new_values = astype_array_safe(values, dtype, copy=copy, errors=errors) + # error: Value of type variable "ArrayLike" of "astype_array_safe" cannot be + # "Union[ndarray, ExtensionArray]" + new_values = astype_array_safe( + values, dtype, copy=copy, errors=errors # type: ignore[type-var] + ) newb = self.make_block(new_values) if newb.shape != self.shape: diff --git a/pandas/core/tools/datetimes.py b/pandas/core/tools/datetimes.py index d3f7eef18c00e..100eac064e361 100644 --- a/pandas/core/tools/datetimes.py +++ b/pandas/core/tools/datetimes.py @@ -431,7 +431,9 @@ def _convert_listlike_datetimes( require_iso8601 = not infer_datetime_format format = None - result = None + # error: Incompatible types in assignment (expression has type "None", variable has + # type "ExtensionArray") + result = None # type: ignore[assignment] if format is not None: try: From 50f64fa46b523be858c4ff9bee75a7f939f90689 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Thu, 4 Mar 2021 10:29:19 +0000 Subject: [PATCH 79/86] update comments --- pandas/core/dtypes/cast.py | 31 ++++++++++++++++++++---- pandas/core/indexes/base.py | 23 ++++++++++++++---- pandas/core/internals/construction.py | 7 +++++- pandas/core/tools/datetimes.py | 34 +++++++++++++++++---------- pandas/core/window/ewm.py | 20 +++++++++++++--- 5 files changed, 90 insertions(+), 25 deletions(-) diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index 14c4dd209bbcc..8bae91a3e6eed 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -1626,7 +1626,12 @@ def try_datetime(v: np.ndarray) -> ArrayLike: # GH#19761 we may have mixed timezones, in which cast 'dta' is # an ndarray[object]. 
Only 1 test # relies on this behavior, see GH#40111 - return dta.reshape(shape) + + # error: Incompatible return value type (got "Union[ndarray, + # DatetimeArray]", expected "ExtensionArray") + # error: Incompatible return value type (got "Union[ndarray, + # DatetimeArray]", expected "ndarray") + return dta.reshape(shape) # type: ignore[return-value] def try_timedelta(v: np.ndarray) -> np.ndarray: # safe coerce to timedelta64 @@ -1709,10 +1714,16 @@ def maybe_cast_to_datetime( dta = sequence_to_datetimes(value, allow_object=False) # GH 25843: Remove tz information since the dtype # didn't specify one - if dta.tz is not None: + + # error: Item "ndarray" of "Union[ndarray, DatetimeArray]" + # has no attribute "tz" + if dta.tz is not None: # type: ignore[union-attr] # equiv: dta.view(dtype) # Note: NOT equivalent to dta.astype(dtype) - dta = dta.tz_localize(None) + + # error: Item "ndarray" of "Union[ndarray, + # DatetimeArray]" has no attribute "tz_localize" + dta = dta.tz_localize(None) # type: ignore[union-attr] value = dta elif is_datetime64tz: dtype = cast(DatetimeTZDtype, dtype) @@ -1738,12 +1749,22 @@ def maybe_cast_to_datetime( elif is_dt_string: # Strings here are naive, so directly localize # equiv: dta.astype(dtype) # though deprecated - value = dta.tz_localize(dtype.tz) + + # error: Item "ndarray" of "Union[ndarray, + # DatetimeArray]" has no attribute "tz_localize" + value = dta.tz_localize( # type: ignore[union-attr] + dtype.tz + ) else: # Numeric values are UTC at this point, # so localize and convert # equiv: Series(dta).astype(dtype) # though deprecated - value = dta.tz_localize("UTC").tz_convert(dtype.tz) + + # error: Item "ndarray" of "Union[ndarray, + # DatetimeArray]" has no attribute "tz_localize" + value = dta.tz_localize( # type: ignore[union-attr] + "UTC" + ).tz_convert(dtype.tz) elif is_timedelta64: # if successful, we get a ndarray[td64ns] value, _ = sequence_to_td64ns(value) diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index 896dc6dfe5c28..cced970ebf20f 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -2966,7 +2966,11 @@ def _union(self, other: Index, sort): ): # Both are unique and monotonic, so can use outer join try: - return self._outer_indexer(lvals, rvals)[0] + # error: Argument 1 to "_outer_indexer" of "Index" has incompatible type + # "Union[ExtensionArray, ndarray]"; expected "ndarray" + # error: Argument 2 to "_outer_indexer" of "Index" has incompatible type + # "Union[ExtensionArray, ndarray]"; expected "ndarray" + return self._outer_indexer(lvals, rvals)[0] # type: ignore[arg-type] except (TypeError, IncompatibleFrequency): # incomparable objects value_list = list(lvals) @@ -2978,7 +2982,12 @@ def _union(self, other: Index, sort): elif not other.is_unique and not self.is_unique: # self and other both have duplicates - result = algos.union_with_duplicates(lvals, rvals) + + # error: Argument 1 to "union_with_duplicates" has incompatible type + # "Union[ExtensionArray, ndarray]"; expected "ndarray" + # error: Argument 2 to "union_with_duplicates" has incompatible type + # "Union[ExtensionArray, ndarray]"; expected "ndarray" + result = algos.union_with_duplicates(lvals, rvals) # type: ignore[arg-type] return _maybe_try_sort(result, sort) # Either other or self is not unique @@ -2990,10 +2999,16 @@ def _union(self, other: Index, sort): missing = algos.unique1d(self.get_indexer_non_unique(other)[1]) if len(missing) > 0: - other_diff = algos.take_nd(rvals, missing, allow_fill=False) + # error: Value of 
type variable "ArrayLike" of "take_nd" cannot be + # "Union[ExtensionArray, ndarray]" + other_diff = algos.take_nd( + rvals, missing, allow_fill=False # type: ignore[type-var] + ) result = concat_compat((lvals, other_diff)) else: - result = lvals + # error: Incompatible types in assignment (expression has type + # "Union[ExtensionArray, ndarray]", variable has type "ndarray") + result = lvals # type: ignore[assignment] if not self.is_monotonic or not other.is_monotonic: result = _maybe_try_sort(result, sort) diff --git a/pandas/core/internals/construction.py b/pandas/core/internals/construction.py index db32d63c3677a..f479db0682872 100644 --- a/pandas/core/internals/construction.py +++ b/pandas/core/internals/construction.py @@ -164,7 +164,12 @@ def rec_array_to_mgr( new_arrays = arrays # type: ignore[assignment] # create the manager - arrays, arr_columns = reorder_arrays(new_arrays, arr_columns, columns) + + # error: Argument 1 to "reorder_arrays" has incompatible type "List[ndarray]"; + # expected "List[ExtensionArray]" + arrays, arr_columns = reorder_arrays( + new_arrays, arr_columns, columns # type: ignore[arg-type] + ) if columns is None: columns = arr_columns diff --git a/pandas/core/tools/datetimes.py b/pandas/core/tools/datetimes.py index 1640ab3fd77dc..f7bb3083b91a9 100644 --- a/pandas/core/tools/datetimes.py +++ b/pandas/core/tools/datetimes.py @@ -361,11 +361,7 @@ def _convert_listlike_datetimes( arg, _ = maybe_convert_dtype(arg, copy=False) except TypeError: if errors == "coerce": - # error: Incompatible types in assignment (expression has type "ndarray", - # variable has type "ExtensionArray") - result = np.array( # type: ignore[assignment] - ["NaT"], dtype="datetime64[ns]" - ).repeat(len(arg)) + result = np.array(["NaT"], dtype="datetime64[ns]").repeat(len(arg)) return DatetimeIndex(result, name=name) elif errors == "ignore": # error: Incompatible types in assignment (expression has type @@ -395,7 +391,9 @@ def _convert_listlike_datetimes( result = None # type: ignore[assignment] if format is not None: - result = _to_datetime_with_format( + # error: Incompatible types in assignment (expression has type + # "Optional[Index]", variable has type "ndarray") + result = _to_datetime_with_format( # type: ignore[assignment] arg, orig_arg, name, tz, format, exact, errors, infer_datetime_format ) if result is not None: @@ -504,7 +502,9 @@ def _to_datetime_with_format( # fallback if result is None: - result = _array_strptime_with_fallback( + # error: Incompatible types in assignment (expression has type + # "Optional[Index]", variable has type "Optional[ndarray]") + result = _array_strptime_with_fallback( # type: ignore[assignment] arg, name, tz, fmt, exact, errors, infer_datetime_format ) if result is not None: @@ -520,7 +520,9 @@ def _to_datetime_with_format( except (ValueError, TypeError): raise e - return result + # error: Incompatible return value type (got "Optional[ndarray]", expected + # "Optional[Index]") + return result # type: ignore[return-value] def _to_datetime_with_unit(arg, unit, name, tz, errors: Optional[str]) -> Index: @@ -539,12 +541,18 @@ def _to_datetime_with_unit(arg, unit, name, tz, errors: Optional[str]) -> Index: if errors == "ignore": # Index constructor _may_ infer to DatetimeIndex - result = Index(result, name=name) + + # error: Incompatible types in assignment (expression has type "Index", variable + # has type "ExtensionArray") + result = Index(result, name=name) # type: ignore[assignment] else: - result = DatetimeIndex(result, name=name) + # error: 
Incompatible types in assignment (expression has type "DatetimeIndex", + # variable has type "ExtensionArray") + result = DatetimeIndex(result, name=name) # type: ignore[assignment] if not isinstance(result, DatetimeIndex): - return result + # error: Incompatible return value type (got "ExtensionArray", expected "Index") + return result # type: ignore[return-value] # GH#23758: We may still need to localize the result with tz # GH#25546: Apply tz_parsed first (from arg), then tz (from caller) @@ -1073,7 +1081,9 @@ def calc_with_mask(carg, mask): # string with NaN-like try: - mask = ~algorithms.isin(arg, list(nat_strings)) + # error: Value of type variable "AnyArrayLike" of "isin" cannot be + # "Iterable[Any]" + mask = ~algorithms.isin(arg, list(nat_strings)) # type: ignore[type-var] return calc_with_mask(arg, mask) except (ValueError, OverflowError, TypeError): pass diff --git a/pandas/core/window/ewm.py b/pandas/core/window/ewm.py index 5a71db82f26e4..3f14c8fdb3d8e 100644 --- a/pandas/core/window/ewm.py +++ b/pandas/core/window/ewm.py @@ -259,7 +259,9 @@ def __init__( self.times = self._selected_obj[times] if not is_datetime64_ns_dtype(self.times): raise ValueError("times must be datetime64[ns] dtype.") - if len(self.times) != len(obj): + # error: Argument 1 to "len" has incompatible type "Union[str, ndarray, + # FrameOrSeries, None]"; expected "Sized" + if len(self.times) != len(obj): # type: ignore[arg-type] raise ValueError("times must be the same length as the object.") if not isinstance(halflife, (str, datetime.timedelta)): raise ValueError( @@ -267,7 +269,13 @@ def __init__( ) if isna(self.times).any(): raise ValueError("Cannot convert NaT values to integer") - _times = np.asarray(self.times.view(np.int64), dtype=np.float64) + # error: Item "str" of "Union[str, ndarray, FrameOrSeries, None]" has no + # attribute "view" + # error: Item "None" of "Union[str, ndarray, FrameOrSeries, None]" has no + # attribute "view" + _times = np.asarray( + self.times.view(np.int64), dtype=np.float64 # type: ignore[union-attr] + ) _halflife = float(Timedelta(self.halflife).value) self._deltas = np.diff(_times) / _halflife # Halflife is no longer applicable when calculating COM @@ -287,7 +295,13 @@ def __init__( # Without times, points are equally spaced self._deltas = np.ones(max(len(self.obj) - 1, 0), dtype=np.float64) self._com = get_center_of_mass( - self.com, self.span, self.halflife, self.alpha + # error: Argument 3 to "get_center_of_mass" has incompatible type + # "Union[float, Any, None, timedelta64, signedinteger[_64Bit]]"; + # expected "Optional[float]" + self.com, + self.span, + self.halflife, # type: ignore[arg-type] + self.alpha, ) def _get_window_indexer(self) -> BaseIndexer: From 478aeee094fe71169f579bc6e562dabbbc95bdc6 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Fri, 5 Mar 2021 18:08:12 +0000 Subject: [PATCH 80/86] update comments --- pandas/core/array_algos/quantile.py | 18 +++++++++++---- pandas/core/arrays/interval.py | 8 +++++-- pandas/core/dtypes/cast.py | 8 +++++-- pandas/core/groupby/generic.py | 8 ++++++- pandas/core/internals/array_manager.py | 32 ++++++++++++++++++-------- pandas/core/internals/blocks.py | 14 ++++++++--- pandas/core/nanops.py | 7 +++--- pandas/core/window/rolling.py | 6 ++++- 8 files changed, 75 insertions(+), 26 deletions(-) diff --git a/pandas/core/array_algos/quantile.py b/pandas/core/array_algos/quantile.py index 802fc4db0a36d..501d3308b7d8b 100644 --- a/pandas/core/array_algos/quantile.py +++ b/pandas/core/array_algos/quantile.py @@ -143,10 +143,17 
@@ def quantile_ea_compat( mask = np.asarray(values.isna()) mask = np.atleast_2d(mask) - values, fill_value = values._values_for_factorize() - values = np.atleast_2d(values) - - result = quantile_with_mask(values, mask, fill_value, qs, interpolation, axis) + # error: Incompatible types in assignment (expression has type "ndarray", variable + # has type "ExtensionArray") + values, fill_value = values._values_for_factorize() # type: ignore[assignment] + # error: No overload variant of "atleast_2d" matches argument type "ExtensionArray" + values = np.atleast_2d(values) # type: ignore[call-overload] + + # error: Argument 1 to "quantile_with_mask" has incompatible type "ExtensionArray"; + # expected "ndarray" + result = quantile_with_mask( + values, mask, fill_value, qs, interpolation, axis # type: ignore[arg-type] + ) if not is_sparse(orig.dtype): # shape[0] should be 1 as long as EAs are 1D @@ -160,4 +167,5 @@ def quantile_ea_compat( assert result.shape == (1, len(qs)), result.shape result = type(orig)._from_factorized(result[0], orig) - return result + # error: Incompatible return value type (got "ndarray", expected "ExtensionArray") + return result # type: ignore[return-value] diff --git a/pandas/core/arrays/interval.py b/pandas/core/arrays/interval.py index 526b081760371..7ccdad11761ab 100644 --- a/pandas/core/arrays/interval.py +++ b/pandas/core/arrays/interval.py @@ -1618,7 +1618,10 @@ def _maybe_convert_platform_interval(values) -> ArrayLike: # GH 19016 # empty lists/tuples get object dtype by default, but this is # prohibited for IntervalArray, so coerce to integer instead - return np.array([], dtype=np.int64) + + # error: Incompatible return value type (got "ndarray", expected + # "ExtensionArray") + return np.array([], dtype=np.int64) # type: ignore[return-value] elif not is_list_like(values) or isinstance(values, ABCDataFrame): # This will raise later, but we avoid passing to maybe_convert_platform return values @@ -1630,4 +1633,5 @@ def _maybe_convert_platform_interval(values) -> ArrayLike: else: values = extract_array(values, extract_numpy=True) - return maybe_convert_platform(values) + # error: Incompatible return value type (got "ExtensionArray", expected "ndarray") + return maybe_convert_platform(values) # type: ignore[return-value] diff --git a/pandas/core/dtypes/cast.py b/pandas/core/dtypes/cast.py index bb55a4ee2f0f4..c5d672b207369 100644 --- a/pandas/core/dtypes/cast.py +++ b/pandas/core/dtypes/cast.py @@ -128,12 +128,16 @@ def maybe_convert_platform( else: # The caller is responsible for ensuring that we have np.ndarray # or ExtensionArray here. 
- arr = values + + # error: Incompatible types in assignment (expression has type "Union[ndarray, + # ExtensionArray]", variable has type "ndarray") + arr = values # type: ignore[assignment] if arr.dtype == object: arr = lib.maybe_convert_objects(arr) - return arr + # error: Incompatible return value type (got "ndarray", expected "ExtensionArray") + return arr # type: ignore[return-value] def is_nested_object(obj) -> bool: diff --git a/pandas/core/groupby/generic.py b/pandas/core/groupby/generic.py index abf1eecfa0952..f02e13e2853c1 100644 --- a/pandas/core/groupby/generic.py +++ b/pandas/core/groupby/generic.py @@ -1187,7 +1187,13 @@ def py_fallback(values: ArrayLike) -> ArrayLike: else: # We are a single block from a BlockManager # or one array from SingleArrayManager - return arrays[0] + + # error: Incompatible return value type (got "Union[ndarray, + # ExtensionArray, ArrayLike]", expected "ExtensionArray") + # error: Incompatible return value type (got "Union[ndarray, + # ExtensionArray, ArrayLike]", expected + # "ndarray") + return arrays[0] # type: ignore[return-value] def array_func(values: ArrayLike) -> ArrayLike: diff --git a/pandas/core/internals/array_manager.py b/pandas/core/internals/array_manager.py index 684f879840602..660a147ce2aa6 100644 --- a/pandas/core/internals/array_manager.py +++ b/pandas/core/internals/array_manager.py @@ -470,8 +470,8 @@ def apply_with_block(self: T, f, align_keys=None, **kwargs) -> T: # DatetimeArray needs to be converted to ndarray for DatetimeBlock arr = arr._data # type: ignore[union-attr] elif arr.dtype.kind == "m" and not isinstance(arr, np.ndarray): - # TimedeltaArray needs to be converted to ndarray for TimedeltaBlock - arr = arr._data # type: ignore[union-attr] + # error: "ExtensionArray" has no attribute "_data" + arr = arr._data # type: ignore[attr-defined] if self.ndim == 2: if isinstance(arr, np.ndarray): @@ -505,14 +505,24 @@ def quantile( for x in self.arrays ] assert axis == 1 - new_arrs = [quantile_compat(x, qs, interpolation, axis=axis) for x in arrs] + # error: Value of type variable "ArrayLike" of "quantile_compat" cannot be + # "object" + new_arrs = [ + quantile_compat(x, qs, interpolation, axis=axis) # type: ignore[type-var] + for x in arrs + ] for i, arr in enumerate(new_arrs): - if arr.ndim == 2: - assert arr.shape[0] == 1, arr.shape - new_arrs[i] = arr[0] + # error: "object" has no attribute "ndim" + if arr.ndim == 2: # type: ignore[attr-defined] + # error: "object" has no attribute "shape" + assert arr.shape[0] == 1, arr.shape # type: ignore[attr-defined] + # error: Value of type "object" is not indexable + new_arrs[i] = arr[0] # type: ignore[index] axes = [qs, self._axes[1]] - return type(self)(new_arrs, axes) + # error: Argument 1 to "ArrayManager" has incompatible type "List[object]"; + # expected "List[Union[ndarray, ExtensionArray]]" + return type(self)(new_arrs, axes) # type: ignore[arg-type] def isna(self, func) -> ArrayManager: return self.apply("apply", func=func) @@ -1014,7 +1024,9 @@ def _reindex_indexer( else: validate_indices(indexer, len(self._axes[0])) new_arrays = [ - take_nd( + # error: Value of type variable "ArrayLike" of "take_nd" cannot be + # "Union[ndarray, ExtensionArray]" + take_nd( # type: ignore[type-var] arr, indexer, allow_fill=True, @@ -1102,7 +1114,9 @@ def unstack(self, unstacker, fill_value) -> ArrayManager: new_arrays = [] for arr in self.arrays: for i in range(unstacker.full_shape[1]): - new_arr = take_nd( + # error: Value of type variable "ArrayLike" of "take_nd" cannot be + # 
"Union[ndarray, ExtensionArray]" + new_arr = take_nd( # type: ignore[type-var] arr, new_indexer2D[:, i], allow_fill=True, fill_value=fill_value ) new_arrays.append(new_arr) diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 25016db43f421..efbfc4d771f0c 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -1027,7 +1027,9 @@ def setitem(self, indexer, value): # length checking check_setitem_lengths(indexer, value, values) - exact_match = is_exact_shape_match(values, arr_value) + # error: Value of type variable "ArrayLike" of "is_exact_shape_match" cannot be + # "Union[Any, ndarray, ExtensionArray]" + exact_match = is_exact_shape_match(values, arr_value) # type: ignore[type-var] if is_empty_indexer(indexer, arr_value): # GH#8669 empty indexers @@ -1513,7 +1515,11 @@ def quantile( assert axis == 1 # only ever called this way assert is_list_like(qs) # caller is responsible for this - result = quantile_compat(self.values, qs, interpolation, axis) + # error: Value of type variable "ArrayLike" of "quantile_compat" cannot be + # "Union[ndarray, ExtensionArray]" + result = quantile_compat( # type: ignore[type-var] + self.values, qs, interpolation, axis + ) return new_block(result, placement=self.mgr_locs, ndim=2) @@ -1933,7 +1939,9 @@ def _can_hold_element(self, element: Any) -> bool: if isinstance(element, (IntegerArray, FloatingArray)): if element._mask.any(): return False - return can_hold_element(self.dtype, element) + # error: Argument 1 to "can_hold_element" has incompatible type + # "Union[dtype[Any], ExtensionDtype]"; expected "dtype[Any]" + return can_hold_element(self.dtype, element) # type: ignore[arg-type] @property def _can_hold_na(self): diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 4e3e614f0d24c..bcc01093b7986 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -1343,7 +1343,9 @@ def nanprod( result = values.prod(axis) # error: Argument 1 to "_maybe_null_out" has incompatible type "Union[number, # ndarray]"; expected "ndarray" - return _maybe_null_out( + # error: Incompatible return value type (got "Union[ndarray, float]", expected + # "float") + return _maybe_null_out( # type: ignore[return-value] result, axis, mask, values.shape, min_count=min_count # type: ignore[arg-type] ) @@ -1466,8 +1468,7 @@ def _maybe_null_out( # "float", variable has type "ndarray") result = np.nan # type: ignore[assignment] - # error: Incompatible return value type (got "ndarray", expected "float") - return result # type: ignore[return-value] + return result def check_below_min_count( diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index 7d4c5c1f71f1e..92d88c65a6620 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -426,7 +426,11 @@ def hfunc(bvalues: ArrayLike) -> ArrayLike: return getattr(res_values, "T", res_values) def hfunc2d(values: ArrayLike) -> ArrayLike: - values = self._prep_values(values) + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") + # error: Argument 1 to "_prep_values" of "BaseWindow" has incompatible type + # "ExtensionArray"; expected "Optional[ndarray]" + values = self._prep_values(values) # type: ignore[assignment,arg-type] return homogeneous_func(values) if isinstance(mgr, ArrayManager) and self.axis == 1: From 567d1d03208220f35c66be634f4262881b54d09e Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Fri, 5 Mar 2021 18:30:50 +0000 Subject: [PATCH 81/86] update 
comments --- pandas/core/dtypes/missing.py | 42 ++++++++++++++++++++++++++++------- 1 file changed, 34 insertions(+), 8 deletions(-) diff --git a/pandas/core/dtypes/missing.py b/pandas/core/dtypes/missing.py index 4a13418ff9a1b..286272b165fb9 100644 --- a/pandas/core/dtypes/missing.py +++ b/pandas/core/dtypes/missing.py @@ -164,9 +164,13 @@ def _isna(obj, inf_as_na: bool = False): elif isinstance(obj, type): return False elif isinstance(obj, (np.ndarray, ABCExtensionArray)): - return _isna_array(obj, inf_as_na=inf_as_na) + # error: Value of type variable "ArrayLike" of "_isna_array" cannot be + # "Union[ndarray, ExtensionArray]" + return _isna_array(obj, inf_as_na=inf_as_na) # type: ignore[type-var] elif isinstance(obj, (ABCSeries, ABCIndex)): - result = _isna_array(obj._values, inf_as_na=inf_as_na) + # error: Value of type variable "ArrayLike" of "_isna_array" cannot be + # "Union[Any, ExtensionArray, ndarray]" + result = _isna_array(obj._values, inf_as_na=inf_as_na) # type: ignore[type-var] # box if isinstance(obj, ABCSeries): result = obj._constructor( @@ -234,19 +238,37 @@ def _isna_array(values: ArrayLike, inf_as_na: bool = False): if is_extension_array_dtype(dtype): if inf_as_na and is_categorical_dtype(dtype): - result = libmissing.isnaobj_old(values.to_numpy()) + # error: "ndarray" has no attribute "to_numpy" + result = libmissing.isnaobj_old( + values.to_numpy() # type: ignore[attr-defined] + ) else: - result = values.isna() + # error: "ndarray" has no attribute "isna" + result = values.isna() # type: ignore[attr-defined] elif is_string_dtype(dtype): - result = _isna_string_dtype(values, dtype, inf_as_na=inf_as_na) + # error: Argument 1 to "_isna_string_dtype" has incompatible type + # "ExtensionArray"; expected "ndarray" + # error: Argument 2 to "_isna_string_dtype" has incompatible type + # "ExtensionDtype"; expected "dtype[Any]" + result = _isna_string_dtype( + values, dtype, inf_as_na=inf_as_na # type: ignore[arg-type] + ) elif needs_i8_conversion(dtype): # this is the NaT pattern result = values.view("i8") == iNaT else: if inf_as_na: - result = ~np.isfinite(values) + # error: Argument 1 to "__call__" of "ufunc" has incompatible type + # "ExtensionArray"; expected "Union[Union[int, float, complex, str, bytes, + # generic], Sequence[Union[int, float, complex, str, bytes, generic]], + # Sequence[Sequence[Any]], _SupportsArray]" + result = ~np.isfinite(values) # type: ignore[arg-type] else: - result = np.isnan(values) + # error: Argument 1 to "__call__" of "ufunc" has incompatible type + # "ExtensionArray"; expected "Union[Union[int, float, complex, str, bytes, + # generic], Sequence[Union[int, float, complex, str, bytes, generic]], + # Sequence[Sequence[Any]], _SupportsArray]" + result = np.isnan(values) # type: ignore[arg-type] return result @@ -658,7 +680,11 @@ def isna_all(arr: ArrayLike) -> bool: checker = lambda x: np.asarray(x.view("i8")) == iNaT # type: ignore[assignment] else: - checker = lambda x: _isna_array(x, inf_as_na=INF_AS_NA) + # error: Incompatible types in assignment (expression has type "Callable[[Any], + # Any]", variable has type "ufunc") + checker = lambda x: _isna_array( # type: ignore[assignment] + x, inf_as_na=INF_AS_NA + ) return all( # error: Argument 1 to "__call__" of "ufunc" has incompatible type From 6521d91545a3c81e7e3527f61e43db48775e8b3e Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Sat, 6 Mar 2021 10:39:57 +0000 Subject: [PATCH 82/86] update comments --- pandas/core/array_algos/take.py | 9 ++++++++- pandas/core/arrays/string_arrow.py | 8 
+++++++- pandas/core/internals/array_manager.py | 8 ++++++-- pandas/core/missing.py | 8 ++++++-- pandas/io/formats/style.py | 18 +++++++++++++++--- 5 files changed, 42 insertions(+), 9 deletions(-) diff --git a/pandas/core/array_algos/take.py b/pandas/core/array_algos/take.py index 08b2dbdcb6d7a..7eed31663f1cb 100644 --- a/pandas/core/array_algos/take.py +++ b/pandas/core/array_algos/take.py @@ -140,7 +140,14 @@ def take_1d( """ if not isinstance(arr, np.ndarray): # ExtensionArray -> dispatch to their method - return arr.take(indexer, fill_value=fill_value, allow_fill=allow_fill) + + # error: Argument 1 to "take" of "ExtensionArray" has incompatible type + # "ndarray"; expected "Sequence[int]" + return arr.take( + indexer, # type: ignore[arg-type] + fill_value=fill_value, + allow_fill=allow_fill, + ) indexer, dtype, fill_value, mask_info = _take_preprocess_indexer_and_fill_value( arr, indexer, 0, None, fill_value, allow_fill diff --git a/pandas/core/arrays/string_arrow.py b/pandas/core/arrays/string_arrow.py index dd7f3777c7138..efdc18cd071b5 100644 --- a/pandas/core/arrays/string_arrow.py +++ b/pandas/core/arrays/string_arrow.py @@ -405,7 +405,13 @@ def fillna(self, value=None, method=None, limit=None): if mask.any(): if method is not None: func = missing.get_fill_func(method) - new_values, _ = func(self.to_numpy(object), limit=limit, mask=mask) + # error: Argument 1 to "to_numpy" of "ArrowStringArray" has incompatible + # type "Type[object]"; expected "Union[str, dtype[Any], None]" + new_values, _ = func( + self.to_numpy(object), # type: ignore[arg-type] + limit=limit, + mask=mask, + ) new_values = self._from_sequence(new_values) else: # fill with value diff --git a/pandas/core/internals/array_manager.py b/pandas/core/internals/array_manager.py index 09d6435cff561..503bd72f017c3 100644 --- a/pandas/core/internals/array_manager.py +++ b/pandas/core/internals/array_manager.py @@ -1021,7 +1021,9 @@ def _reindex_indexer( else: validate_indices(indexer, len(self._axes[0])) new_arrays = [ - take_1d( + # error: Value of type variable "ArrayLike" of "take_1d" cannot be + # "Union[ndarray, ExtensionArray]" [type-var] + take_1d( # type: ignore[type-var] arr, indexer, allow_fill=True, @@ -1109,7 +1111,9 @@ def unstack(self, unstacker, fill_value) -> ArrayManager: new_arrays = [] for arr in self.arrays: for i in range(unstacker.full_shape[1]): - new_arr = take_1d( + # error: Value of type variable "ArrayLike" of "take_1d" cannot be + # "Union[ndarray, ExtensionArray]" [type-var] + new_arr = take_1d( # type: ignore[type-var] arr, new_indexer2D[:, i], allow_fill=True, fill_value=fill_value ) new_arrays.append(new_arr) diff --git a/pandas/core/missing.py b/pandas/core/missing.py index 515e970c21635..742ebd938cb60 100644 --- a/pandas/core/missing.py +++ b/pandas/core/missing.py @@ -723,7 +723,9 @@ def _pad_1d( ) -> tuple[np.ndarray, np.ndarray]: mask = _fillna_prep(values, mask) algos.pad_inplace(values, mask, limit=limit) - return values, mask + # error: Incompatible return value type (got "Tuple[ndarray, Optional[ndarray]]", + # expected "Tuple[ndarray, ndarray]") + return values, mask # type: ignore[return-value] @_datetimelike_compat @@ -734,7 +736,9 @@ def _backfill_1d( ) -> tuple[np.ndarray, np.ndarray]: mask = _fillna_prep(values, mask) algos.backfill_inplace(values, mask, limit=limit) - return values, mask + # error: Incompatible return value type (got "Tuple[ndarray, Optional[ndarray]]", + # expected "Tuple[ndarray, ndarray]") + return values, mask # type: ignore[return-value] 
@_datetimelike_compat diff --git a/pandas/io/formats/style.py b/pandas/io/formats/style.py index 619b71611674f..b3f48128ef6cf 100644 --- a/pandas/io/formats/style.py +++ b/pandas/io/formats/style.py @@ -1732,7 +1732,11 @@ def f(data: DataFrame, props: str) -> np.ndarray: if props is None: props = f"background-color: {null_color};" - return self.apply(f, axis=None, subset=subset, props=props) + # error: Argument 1 to "apply" of "Styler" has incompatible type + # "Callable[[DataFrame, str], ndarray]"; expected "Callable[..., Styler]" + return self.apply( + f, axis=None, subset=subset, props=props # type: ignore[arg-type] + ) def highlight_max( self, @@ -1775,7 +1779,11 @@ def f(data: FrameOrSeries, props: str) -> np.ndarray: if props is None: props = f"background-color: {color};" - return self.apply(f, axis=axis, subset=subset, props=props) + # error: Argument 1 to "apply" of "Styler" has incompatible type + # "Callable[[FrameOrSeries, str], ndarray]"; expected "Callable[..., Styler]" + return self.apply( + f, axis=axis, subset=subset, props=props # type: ignore[arg-type] + ) def highlight_min( self, @@ -1818,7 +1826,11 @@ def f(data: FrameOrSeries, props: str) -> np.ndarray: if props is None: props = f"background-color: {color};" - return self.apply(f, axis=axis, subset=subset, props=props) + # error: Argument 1 to "apply" of "Styler" has incompatible type + # "Callable[[FrameOrSeries, str], ndarray]"; expected "Callable[..., Styler]" + return self.apply( + f, axis=axis, subset=subset, props=props # type: ignore[arg-type] + ) @classmethod def from_custom_template(cls, searchpath, name): From 57bb406a7ee73578dd91a56bbbcea3a9595c61a2 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Mon, 8 Mar 2021 11:11:44 +0000 Subject: [PATCH 83/86] update comments --- pandas/core/generic.py | 4 +++- pandas/core/internals/array_manager.py | 5 ++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/pandas/core/generic.py b/pandas/core/generic.py index 7f5e634753c39..741771705797f 100644 --- a/pandas/core/generic.py +++ b/pandas/core/generic.py @@ -753,11 +753,13 @@ def swapaxes(self: FrameOrSeries, axis1, axis2, copy=True) -> FrameOrSeries: # ignore needed because of NDFrame constructor is different than # DataFrame/Series constructors. 
return self._constructor( + # error: Argument 1 to "NDFrame" has incompatible type "ndarray"; expected + # "Union[ArrayManager, BlockManager]" # error: Argument 2 to "NDFrame" has incompatible type "*Generator[Index, # None, None]"; expected "bool" [arg-type] # error: Argument 2 to "NDFrame" has incompatible type "*Generator[Index, # None, None]"; expected "Optional[Mapping[Optional[Hashable], Any]]" - new_values, + new_values, # type: ignore[arg-type] *new_axes, # type: ignore[arg-type] ).__finalize__(self, method="swapaxes") diff --git a/pandas/core/internals/array_manager.py b/pandas/core/internals/array_manager.py index 9c03710f1f6bc..7f88de0bbcfbf 100644 --- a/pandas/core/internals/array_manager.py +++ b/pandas/core/internals/array_manager.py @@ -476,9 +476,8 @@ def apply_with_block(self: T, f, align_keys=None, swap_axis=True, **kwargs) -> T elif arr.dtype.kind == "m" and not isinstance(arr, np.ndarray): # TimedeltaArray needs to be converted to ndarray for TimedeltaBlock - # error: Item "ExtensionArray" of "Union[Any, ExtensionArray]" has no - # attribute "_data" - arr = arr._data # type: ignore[union-attr] + # error: "ExtensionArray" has no attribute "_data" + arr = arr._data # type: ignore[attr-defined] if self.ndim == 2: if isinstance(arr, np.ndarray): From 2cfd5a71121ba143965165df558bb22878b0535a Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Mon, 8 Mar 2021 16:19:26 +0000 Subject: [PATCH 84/86] update comments --- pandas/core/internals/blocks.py | 13 ++++++++++--- pandas/core/window/rolling.py | 7 ++----- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index e4a740806bd2b..06c656b62efc8 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -1097,7 +1097,9 @@ def putmask(self, mask, new) -> List[Block]: List[Block] """ orig_mask = mask - mask, noop = validate_putmask(self.values.T, mask) + # error: Value of type variable "ArrayLike" of "validate_putmask" cannot be + # "Union[ndarray, ExtensionArray]" + mask, noop = validate_putmask(self.values.T, mask) # type: ignore[type-var] assert not isinstance(new, (ABCIndex, ABCSeries, ABCDataFrame)) # if we are passed a scalar None, convert it here @@ -1106,7 +1108,9 @@ def putmask(self, mask, new) -> List[Block]: if self._can_hold_element(new): - putmask_without_repeat(self.values.T, mask, new) + # error: Argument 1 to "putmask_without_repeat" has incompatible type + # "Union[ndarray, ExtensionArray]"; expected "ndarray" + putmask_without_repeat(self.values.T, mask, new) # type: ignore[arg-type] return [self] elif noop: @@ -1121,7 +1125,10 @@ def putmask(self, mask, new) -> List[Block]: elif self.ndim == 1 or self.shape[0] == 1: # no need to split columns - nv = putmask_smart(self.values.T, mask, new).T + + # error: Argument 1 to "putmask_smart" has incompatible type "Union[ndarray, + # ExtensionArray]"; expected "ndarray" + nv = putmask_smart(self.values.T, mask, new).T # type: ignore[arg-type] return [self.make_block(nv)] else: diff --git a/pandas/core/window/rolling.py b/pandas/core/window/rolling.py index c264ef05dcf0c..17d05e81b82bb 100644 --- a/pandas/core/window/rolling.py +++ b/pandas/core/window/rolling.py @@ -304,8 +304,7 @@ def _prep_values(self, values: ArrayLike) -> np.ndarray: """Convert input to numpy arrays for Cython routines""" if needs_i8_conversion(values.dtype): raise NotImplementedError( - # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" - f"ops for {type(self).__name__} for this " 
# type: ignore[union-attr] + f"ops for {type(self).__name__} for this " f"dtype {values.dtype} are not implemented" ) else: @@ -314,9 +313,7 @@ def _prep_values(self, values: ArrayLike) -> np.ndarray: try: values = ensure_float64(values) except (ValueError, TypeError) as err: - # error: Item "None" of "Optional[ndarray]" has no attribute "dtype" - tmp = values.dtype # type: ignore[union-attr] - raise TypeError(f"cannot handle this type -> {tmp}") from err + raise TypeError(f"cannot handle this type -> {values.dtype}") from err # Convert inf to nan for C funcs From 1c51c74eed08ce8cd48d97deec973eed1a6707f4 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Tue, 9 Mar 2021 09:57:09 +0000 Subject: [PATCH 85/86] update comments --- pandas/core/dtypes/dtypes.py | 10 ++++++-- pandas/core/util/hashing.py | 46 ++++++++++++++++++++++++++---------- 2 files changed, 42 insertions(+), 14 deletions(-) diff --git a/pandas/core/dtypes/dtypes.py b/pandas/core/dtypes/dtypes.py index c9b52f30fdfec..d44d2a564fb78 100644 --- a/pandas/core/dtypes/dtypes.py +++ b/pandas/core/dtypes/dtypes.py @@ -469,8 +469,14 @@ def _hash_categories(categories, ordered: Ordered = True) -> int: [cat_array, np.arange(len(cat_array), dtype=cat_array.dtype)] ) else: - cat_array = [cat_array] - hashed = combine_hash_arrays(iter(cat_array), num_items=len(cat_array)) + # error: Incompatible types in assignment (expression has type + # "List[ndarray]", variable has type "ndarray") + cat_array = [cat_array] # type: ignore[assignment] + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "int") + hashed = combine_hash_arrays( # type: ignore[assignment] + iter(cat_array), num_items=len(cat_array) + ) return np.bitwise_xor.reduce(hashed) @classmethod diff --git a/pandas/core/util/hashing.py b/pandas/core/util/hashing.py index 9d488bb13b0f1..7d314d6a6fa1a 100644 --- a/pandas/core/util/hashing.py +++ b/pandas/core/util/hashing.py @@ -116,10 +116,14 @@ def hash_pandas_object( return Series(hash_tuples(obj, encoding, hash_key), dtype="uint64", copy=False) elif isinstance(obj, ABCIndex): - h = hash_array(obj._values, encoding, hash_key, categorize).astype( - "uint64", copy=False - ) - h = Series(h, index=obj, dtype="uint64", copy=False) + # error: Value of type variable "ArrayLike" of "hash_array" cannot be + # "Union[ExtensionArray, ndarray]" + h = hash_array( # type: ignore[type-var] + obj._values, encoding, hash_key, categorize + ).astype("uint64", copy=False) + # error: Incompatible types in assignment (expression has type "Series", + # variable has type "ndarray") + h = Series(h, index=obj, dtype="uint64", copy=False) # type: ignore[assignment] elif isinstance(obj, ABCSeries): h = hash_array(obj._values, encoding, hash_key, categorize).astype( @@ -139,7 +143,11 @@ def hash_pandas_object( arrays = itertools.chain([h], index_iter) h = combine_hash_arrays(arrays, 2) - h = Series(h, index=obj.index, dtype="uint64", copy=False) + # error: Incompatible types in assignment (expression has type "Series", + # variable has type "ndarray") + h = Series( # type: ignore[assignment] + h, index=obj.index, dtype="uint64", copy=False + ) elif isinstance(obj, ABCDataFrame): hashes = (hash_array(series._values) for _, series in obj.items()) @@ -162,10 +170,15 @@ def hash_pandas_object( hashes = (x for x in _hashes) h = combine_hash_arrays(hashes, num_items) - h = Series(h, index=obj.index, dtype="uint64", copy=False) + # error: Incompatible types in assignment (expression has type "Series", + # variable has type 
"ndarray") + h = Series( # type: ignore[assignment] + h, index=obj.index, dtype="uint64", copy=False + ) else: raise TypeError(f"Unexpected type for hashing {type(obj)}") - return h + # error: Incompatible return value type (got "ndarray", expected "Series") + return h # type: ignore[return-value] def hash_tuples( @@ -284,12 +297,21 @@ def hash_array( # hash values. (This check is above the complex check so that we don't ask # numpy if categorical is a subdtype of complex, as it will choke). if is_categorical_dtype(dtype): - vals = cast("Categorical", vals) - return _hash_categorical(vals, encoding, hash_key) + # error: Incompatible types in assignment (expression has type "Categorical", + # variable has type "ndarray") + vals = cast("Categorical", vals) # type: ignore[assignment] + # error: Argument 1 to "_hash_categorical" has incompatible type "ndarray"; + # expected "Categorical" + return _hash_categorical(vals, encoding, hash_key) # type: ignore[arg-type] elif is_extension_array_dtype(dtype): - vals, _ = vals._values_for_factorize() - - return _hash_ndarray(vals, encoding, hash_key, categorize) + # error: Incompatible types in assignment (expression has type "ndarray", + # variable has type "ExtensionArray") + # error: "ndarray" has no attribute "_values_for_factorize" + vals, _ = vals._values_for_factorize() # type: ignore[assignment,attr-defined] + + # error: Argument 1 to "_hash_ndarray" has incompatible type "ExtensionArray"; + # expected "ndarray" + return _hash_ndarray(vals, encoding, hash_key, categorize) # type: ignore[arg-type] def _hash_ndarray( From 81966ec9d8c157c0fbb620023ffdbcd0244c5408 Mon Sep 17 00:00:00 2001 From: Simon Hawkins Date: Wed, 10 Mar 2021 14:21:43 +0000 Subject: [PATCH 86/86] update comments --- pandas/core/arrays/categorical.py | 6 +++++- pandas/core/internals/array_manager.py | 17 +++++++---------- pandas/core/internals/blocks.py | 8 ++++++-- pandas/core/reshape/melt.py | 7 ++++++- 4 files changed, 24 insertions(+), 14 deletions(-) diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py index 66e7e368a83f6..8588bc9aa94ec 100644 --- a/pandas/core/arrays/categorical.py +++ b/pandas/core/arrays/categorical.py @@ -2159,7 +2159,11 @@ def mode(self, dropna=True): if dropna: good = self._codes != -1 codes = self._codes[good] - codes = sorted(htable.mode_int64(ensure_int64(codes), dropna)) + # error: Incompatible types in assignment (expression has type "List[Any]", + # variable has type "ndarray") + codes = sorted( # type: ignore[assignment] + htable.mode_int64(ensure_int64(codes), dropna) + ) codes = coerce_indexer_dtype(codes, self.dtype.categories) return self._from_backing_data(codes) diff --git a/pandas/core/internals/array_manager.py b/pandas/core/internals/array_manager.py index 3a297d4015aa0..52ad52b6912ec 100644 --- a/pandas/core/internals/array_manager.py +++ b/pandas/core/internals/array_manager.py @@ -509,7 +509,9 @@ def quantile( interpolation="linear", ) -> ArrayManager: - arrs = [ensure_block_shape(x, 2) for x in self.arrays] + # error: Value of type variable "ArrayLike" of "ensure_block_shape" cannot be + # "Union[ndarray, ExtensionArray]" + arrs = [ensure_block_shape(x, 2) for x in self.arrays] # type: ignore[type-var] assert axis == 1 # error: Value of type variable "ArrayLike" of "quantile_compat" cannot be # "object" @@ -518,17 +520,12 @@ def quantile( for x in arrs ] for i, arr in enumerate(new_arrs): - # error: "object" has no attribute "ndim" - if arr.ndim == 2: # type: ignore[attr-defined] - # error: "object" 
has no attribute "shape" - assert arr.shape[0] == 1, arr.shape # type: ignore[attr-defined] - # error: Value of type "object" is not indexable - new_arrs[i] = arr[0] # type: ignore[index] + if arr.ndim == 2: + assert arr.shape[0] == 1, arr.shape + new_arrs[i] = arr[0] axes = [qs, self._axes[1]] - # error: Argument 1 to "ArrayManager" has incompatible type "List[object]"; - # expected "List[Union[ndarray, ExtensionArray]]" - return type(self)(new_arrs, axes) # type: ignore[arg-type] + return type(self)(new_arrs, axes) def isna(self, func) -> ArrayManager: return self.apply("apply", func=func) diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 6980516f19b99..bfcfc15aaf153 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -117,8 +117,10 @@ ) from pandas.core.arrays._mixins import NDArrayBackedExtensionArray +# comparison is faster than is_object_dtype -_dtype_obj = np.dtype(object) # comparison is faster than is_object_dtype +# error: Value of type variable "_DTypeScalar" of "dtype" cannot be "object" +_dtype_obj = np.dtype(object) # type: ignore[type-var] class Block(PandasObject): @@ -288,7 +290,9 @@ def get_values(self, dtype: Optional[DtypeObj] = None) -> np.ndarray: """ if dtype == _dtype_obj: return self.values.astype(_dtype_obj) - return self.values + # error: Incompatible return value type (got "Union[ndarray, ExtensionArray]", + # expected "ndarray") + return self.values # type: ignore[return-value] @final def get_block_values_for_json(self) -> np.ndarray: diff --git a/pandas/core/reshape/melt.py b/pandas/core/reshape/melt.py index 447a2a35a1e8b..09249eba9c3f5 100644 --- a/pandas/core/reshape/melt.py +++ b/pandas/core/reshape/melt.py @@ -148,7 +148,12 @@ def melt( mdata[value_name] = frame._values.ravel("F") # type: ignore[assignment] for i, col in enumerate(var_name): # asanyarray will keep the columns as an Index - mdata[col] = np.asanyarray(frame.columns._get_level_values(i)).repeat(N) + + # error: Incompatible types in assignment (expression has type "ndarray", target + # has type "Series") + mdata[col] = np.asanyarray( # type: ignore[assignment] + frame.columns._get_level_values(i) + ).repeat(N) result = frame._constructor(mdata, columns=mcolumns)