Commit 5740667

STY: Enable B904 (#56941)
1 parent 7a265c2 · commit 5740667

25 files changed (+74, -68 lines)
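
B904 is the flake8-bugbear rule (enforced here through ruff) that requires exceptions raised inside an except block to use explicit chaining: raise ... from err keeps the triggering exception as the new error's __cause__, while raise ... from None suppresses it. A minimal sketch of the pattern these changes apply, using throwaway names (parse_port is illustrative, not from the pandas codebase):

def parse_port(text: str) -> int:
    try:
        return int(text)
    except ValueError as err:
        # B904-compliant: the original ValueError becomes the explicit
        # __cause__ of the new error instead of an implicit __context__.
        raise RuntimeError(f"invalid port: {text!r}") from err


try:
    parse_port("eighty")
except RuntimeError as exc:
    assert isinstance(exc.__cause__, ValueError)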

asv_bench/benchmarks/algos/isin.py (+2, -2)

@@ -61,8 +61,8 @@ def setup(self, dtype):
                 self.series = Series(
                     Index([f"i-{i}" for i in range(N)], dtype=object), dtype=dtype
                 )
-            except ImportError:
-                raise NotImplementedError
+            except ImportError as err:
+                raise NotImplementedError from err
             self.values = list(self.series[:2])

         else:

asv_bench/benchmarks/array.py (+2, -2)

@@ -76,8 +76,8 @@ class ArrowStringArray:
     def setup(self, multiple_chunks):
         try:
             import pyarrow as pa
-        except ImportError:
-            raise NotImplementedError
+        except ImportError as err:
+            raise NotImplementedError from err
         strings = np.array([str(i) for i in range(10_000)], dtype=object)
         if multiple_chunks:
             chunks = [strings[i : i + 100] for i in range(0, len(strings), 100)]

asv_bench/benchmarks/strings.py (+2, -2)

@@ -21,8 +21,8 @@ def setup(self, dtype):
             self.s = Series(
                 Index([f"i-{i}" for i in range(10000)], dtype=object), dtype=dtype
             )
-        except ImportError:
-            raise NotImplementedError
+        except ImportError as err:
+            raise NotImplementedError from err


 class Construction:

pandas/compat/_optional.py (+2, -2)

@@ -160,9 +160,9 @@ def import_optional_dependency(
     )
     try:
         module = importlib.import_module(name)
-    except ImportError:
+    except ImportError as err:
         if errors == "raise":
-            raise ImportError(msg)
+            raise ImportError(msg) from err
         return None

     # Handle submodules: if we have submodule, grab parent module from sys.modules

pandas/core/array_algos/datetimelike_accumulations.py (+4, -2)

@@ -37,8 +37,10 @@ def _cum_func(
             np.cumsum: 0,
             np.minimum.accumulate: np.iinfo(np.int64).max,
         }[func]
-    except KeyError:
-        raise ValueError(f"No accumulation for {func} implemented on BaseMaskedArray")
+    except KeyError as err:
+        raise ValueError(
+            f"No accumulation for {func} implemented on BaseMaskedArray"
+        ) from err

     mask = isna(values)
     y = values.view("i8")

pandas/core/array_algos/masked_accumulations.py (+2, -2)

@@ -60,10 +60,10 @@ def _cum_func(
             np.cumsum: 0,
             np.minimum.accumulate: dtype_info.max,
         }[func]
-    except KeyError:
+    except KeyError as err:
         raise NotImplementedError(
             f"No accumulation for {func} implemented on BaseMaskedArray"
-        )
+        ) from err

     values[mask] = fill_value

pandas/core/arrays/categorical.py (+2, -2)

@@ -597,9 +597,9 @@ def astype(self, dtype: AstypeArg, copy: bool = True) -> ArrayLike:
             except (
                 TypeError,  # downstream error msg for CategoricalIndex is misleading
                 ValueError,
-            ):
+            ) as err:
                 msg = f"Cannot cast {self.categories.dtype} dtype to {dtype}"
-                raise ValueError(msg)
+                raise ValueError(msg) from err

             result = take_nd(
                 new_cats, ensure_platform_int(self._codes), fill_value=fill_value

pandas/core/arrays/datetimelike.py (+4, -4)

@@ -524,9 +524,9 @@ def _validate_comparison_value(self, other):
             try:
                 # GH#18435 strings get a pass from tzawareness compat
                 other = self._scalar_from_string(other)
-            except (ValueError, IncompatibleFrequency):
+            except (ValueError, IncompatibleFrequency) as err:
                 # failed to parse as Timestamp/Timedelta/Period
-                raise InvalidComparison(other)
+                raise InvalidComparison(other) from err

         if isinstance(other, self._recognized_scalars) or other is NaT:
             other = self._scalar_type(other)
@@ -664,11 +664,11 @@ def _validate_listlike(self, value, allow_object: bool = False):
             if lib.infer_dtype(value) in self._infer_matches:
                 try:
                     value = type(self)._from_sequence(value)
-                except (ValueError, TypeError):
+                except (ValueError, TypeError) as err:
                     if allow_object:
                         return value
                     msg = self._validation_error_message(value, True)
-                    raise TypeError(msg)
+                    raise TypeError(msg) from err

         # Do type inference if necessary up front (after unpacking
         # NumpyExtensionArray)

pandas/core/arrays/period.py (+2, -2)

@@ -1188,10 +1188,10 @@ def dt64arr_to_periodarr(
     freq = Period._maybe_convert_freq(freq)
     try:
         base = freq._period_dtype_code
-    except (AttributeError, TypeError):
+    except (AttributeError, TypeError) as err:
         # AttributeError: _period_dtype_code might not exist
         # TypeError: _period_dtype_code might intentionally raise
-        raise TypeError(f"{freq.name} is not supported as period frequency")
+        raise TypeError(f"{freq.name} is not supported as period frequency") from err
     return c_dt64arr_to_periodarr(data.view("i8"), base, tz, reso=reso), freq


pandas/core/dtypes/cast.py (+2, -2)

@@ -1817,8 +1817,8 @@ def np_can_hold_element(dtype: np.dtype, element: Any) -> Any:
                     # TODO: general-case for EAs?
                     try:
                         casted = element.astype(dtype)
-                    except (ValueError, TypeError):
-                        raise LossySetitemError
+                    except (ValueError, TypeError) as err:
+                        raise LossySetitemError from err
                     # Check for cases of either
                     # a) lossy overflow/rounding or
                     # b) semantic changes like dt64->int64

pandas/core/generic.py (+4, -2)

@@ -576,8 +576,10 @@ def _construct_axes_dict(self, axes: Sequence[Axis] | None = None, **kwargs):
     def _get_axis_number(cls, axis: Axis) -> AxisInt:
         try:
             return cls._AXIS_TO_AXIS_NUMBER[axis]
-        except KeyError:
-            raise ValueError(f"No axis named {axis} for object type {cls.__name__}")
+        except KeyError as err:
+            raise ValueError(
+                f"No axis named {axis} for object type {cls.__name__}"
+            ) from err

     @final
     @classmethod

pandas/core/indexes/base.py (+4, -4)

@@ -3798,7 +3798,7 @@ def get_loc(self, key):
                 isinstance(casted_key, abc.Iterable)
                 and any(isinstance(x, slice) for x in casted_key)
             ):
-                raise InvalidIndexError(key)
+                raise InvalidIndexError(key) from err
             raise KeyError(key) from err
         except TypeError:
             # If we have a listlike key, _check_indexing_error will raise
@@ -5750,13 +5750,13 @@ def asof(self, label):
         self._searchsorted_monotonic(label)  # validate sortedness
         try:
             loc = self.get_loc(label)
-        except (KeyError, TypeError):
+        except (KeyError, TypeError) as err:
             # KeyError -> No exact match, try for padded
             # TypeError -> passed e.g. non-hashable, fall through to get
             # the tested exception message
             indexer = self.get_indexer([label], method="pad")
             if indexer.ndim > 1 or indexer.size > 1:
-                raise TypeError("asof requires scalar valued input")
+                raise TypeError("asof requires scalar valued input") from err
             loc = indexer.item()
             if loc == -1:
                 return self._na_value
@@ -6812,7 +6812,7 @@ def get_slice_bound(self, label, side: Literal["left", "right"]) -> int:
                 return self._searchsorted_monotonic(label, side)
             except ValueError:
                 # raise the original KeyError
-                raise err
+                raise err from None

         if isinstance(slc, np.ndarray):
             # get_loc may return a boolean array, which
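
The get_slice_bound hunk above uses the other form B904 accepts, raise ... from None, because the intent is to surface the original KeyError on its own rather than chain the fallback's ValueError onto it. A small sketch of that behaviour with throwaway names (not pandas code):

def lookup(mapping: dict, key):
    try:
        return mapping[key]
    except KeyError as err:
        try:
            return mapping[str(key)]
        except KeyError:
            # Re-raise the original error; "from None" suppresses the
            # "During handling of the above exception..." chaining from
            # the failed fallback lookup.
            raise err from None


try:
    lookup({"1": "one"}, 2)
except KeyError as exc:
    assert exc.__suppress_context__  # set by "raise ... from None"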

pandas/core/interchange/column.py (+4, -4)

@@ -182,8 +182,8 @@ def describe_null(self):
         kind = self.dtype[0]
         try:
             null, value = _NULL_DESCRIPTION[kind]
-        except KeyError:
-            raise NotImplementedError(f"Data type {kind} not yet supported")
+        except KeyError as err:
+            raise NotImplementedError(f"Data type {kind} not yet supported") from err

         return null, value

@@ -341,9 +341,9 @@ def _get_validity_buffer(self) -> tuple[PandasBuffer, Any]:

         try:
             msg = f"{_NO_VALIDITY_BUFFER[null]} so does not have a separate mask"
-        except KeyError:
+        except KeyError as err:
             # TODO: implement for other bit/byte masks?
-            raise NotImplementedError("See self.describe_null")
+            raise NotImplementedError("See self.describe_null") from err

         raise NoBufferPresent(msg)

pandas/core/reshape/encoding.py (+2, -2)

@@ -493,8 +493,8 @@ def from_dummies(
     # index data with a list of all columns that are dummies
     try:
         data_to_decode = data.astype("boolean", copy=False)
-    except TypeError:
-        raise TypeError("Passed DataFrame contains non-dummy data")
+    except TypeError as err:
+        raise TypeError("Passed DataFrame contains non-dummy data") from err

     # collect prefixes and get lists to slice data for each prefix
     variables_slice = defaultdict(list)

pandas/core/sorting.py (+2, -2)

@@ -582,11 +582,11 @@ def ensure_key_mapped(
             type_of_values = type(values)
             # error: Too many arguments for "ExtensionArray"
             result = type_of_values(result)  # type: ignore[call-arg]
-    except TypeError:
+    except TypeError as err:
         raise TypeError(
             f"User-provided `key` function returned an invalid type {type(result)} \
             which could not be converted to {type(values)}."
-        )
+        ) from err

     return result

pandas/core/tools/datetimes.py (+2, -2)

@@ -510,14 +510,14 @@ def _to_datetime_with_unit(arg, unit, name, utc: bool, errors: str) -> Index:
             with np.errstate(over="raise"):
                 try:
                     arr = cast_from_unit_vectorized(arg, unit=unit)
-                except OutOfBoundsDatetime:
+                except OutOfBoundsDatetime as err:
                     if errors != "raise":
                         return _to_datetime_with_unit(
                             arg.astype(object), unit, name, utc, errors
                         )
                     raise OutOfBoundsDatetime(
                         f"cannot convert input with unit '{unit}'"
-                    )
+                    ) from err

             arr = arr.view("M8[ns]")
             tz_parsed = None

pandas/io/formats/style_render.py (+2, -2)

@@ -1922,11 +1922,11 @@ def maybe_convert_css_to_tuples(style: CSSProperties) -> CSSList:
                 for x in s
                 if x.strip() != ""
             ]
-        except IndexError:
+        except IndexError as err:
             raise ValueError(
                 "Styles supplied as string must follow CSS rule formats, "
                 f"for example 'attr: val;'. '{style}' was given."
-            )
+            ) from err
     return style


pandas/io/formats/xml.py (+12, -8)

@@ -293,8 +293,8 @@ def _build_attribs(self, d: dict[str, Any], elem_row: Any) -> Any:
             try:
                 if not isna(d[col]):
                     elem_row.attrib[attr_name] = str(d[col])
-            except KeyError:
-                raise KeyError(f"no valid column, {col}")
+            except KeyError as err:
+                raise KeyError(f"no valid column, {col}") from err
         return elem_row

     @final
@@ -330,8 +330,8 @@ def _build_elems(self, d: dict[str, Any], elem_row: Any) -> None:
             try:
                 val = None if isna(d[col]) or d[col] == "" else str(d[col])
                 sub_element_cls(elem_row, elem_name).text = val
-            except KeyError:
-                raise KeyError(f"no valid column, {col}")
+            except KeyError as err:
+                raise KeyError(f"no valid column, {col}") from err

     @final
     def write_output(self) -> str | None:
@@ -408,8 +408,10 @@ def _get_prefix_uri(self) -> str:
             if self.prefix:
                 try:
                     uri = f"{{{self.namespaces[self.prefix]}}}"
-                except KeyError:
-                    raise KeyError(f"{self.prefix} is not included in namespaces")
+                except KeyError as err:
+                    raise KeyError(
+                        f"{self.prefix} is not included in namespaces"
+                    ) from err
             elif "" in self.namespaces:
                 uri = f'{{{self.namespaces[""]}}}'
             else:
@@ -504,8 +506,10 @@ def _get_prefix_uri(self) -> str:
             if self.prefix:
                 try:
                     uri = f"{{{self.namespaces[self.prefix]}}}"
-                except KeyError:
-                    raise KeyError(f"{self.prefix} is not included in namespaces")
+                except KeyError as err:
+                    raise KeyError(
+                        f"{self.prefix} is not included in namespaces"
+                    ) from err
             elif "" in self.namespaces:
                 uri = f'{{{self.namespaces[""]}}}'
             else:

pandas/io/parsers/arrow_parser_wrapper.py (+4, -4)

@@ -214,9 +214,9 @@ def _finalize_pandas_output(self, frame: DataFrame) -> DataFrame:
                 self.dtype = pandas_dtype(self.dtype)
             try:
                 frame = frame.astype(self.dtype)
-            except TypeError as e:
+            except TypeError as err:
                 # GH#44901 reraise to keep api consistent
-                raise ValueError(e)
+                raise ValueError(str(err)) from err
         return frame

     def _validate_usecols(self, usecols) -> None:
@@ -247,7 +247,7 @@ def read(self) -> DataFrame:

         try:
             convert_options = pyarrow_csv.ConvertOptions(**self.convert_options)
-        except TypeError:
+        except TypeError as err:
             include = self.convert_options.get("include_columns", None)
             if include is not None:
                 self._validate_usecols(include)
@@ -258,7 +258,7 @@ def read(self) -> DataFrame:
             ):
                 raise TypeError(
                     "The 'pyarrow' engine requires all na_values to be strings"
-                )
+                ) from err

             raise

pandas/io/parsers/python_parser.py (+2, -2)

@@ -733,8 +733,8 @@ def _next_line(self) -> list[Scalar]:
                         if ret:
                             line = ret[0]
                             break
-                except IndexError:
-                    raise StopIteration
+                except IndexError as err:
+                    raise StopIteration from err
         else:
             while self.skipfunc(self.pos):
                 self.pos += 1

pandas/io/xml.py (+4, -4)

@@ -484,12 +484,12 @@ def _validate_path(self) -> list[Any]:
                 if children == [] and attrs == {}:
                     raise ValueError(msg)

-        except (KeyError, SyntaxError):
+        except (KeyError, SyntaxError) as err:
             raise SyntaxError(
                 "You have used an incorrect or unsupported XPath "
                 "expression for etree library or you used an "
                 "undeclared namespace prefix."
-            )
+            ) from err

         return elems

@@ -746,12 +746,12 @@ class that build Data Frame and infers specific dtypes.
     try:
         with TextParser(nodes, names=tags, **kwargs) as tp:
             return tp.read()
-    except ParserError:
+    except ParserError as err:
         raise ParserError(
             "XML document may be too complex for import. "
             "Try to flatten document and use distinct "
             "element and attribute names."
-        )
+        ) from err


 def _parse(

pandas/tests/indexes/datetimes/test_constructors.py (+4, -4)

@@ -1045,12 +1045,12 @@ def test_dti_constructor_with_non_nano_now_today(self):
         tolerance = pd.Timedelta(microseconds=1)

         diff0 = result[0] - now.as_unit("s")
-        assert diff0 >= pd.Timedelta(0)
-        assert diff0 < tolerance
+        assert diff0 >= pd.Timedelta(0), f"The difference is {diff0}"
+        assert diff0 < tolerance, f"The difference is {diff0}"

         diff1 = result[1] - today.as_unit("s")
-        assert diff1 >= pd.Timedelta(0)
-        assert diff1 < tolerance
+        assert diff1 >= pd.Timedelta(0), f"The difference is {diff0}"
+        assert diff1 < tolerance, f"The difference is {diff0}"

     def test_dti_constructor_object_float_matches_float_dtype(self):
         # GH#55780
