
Commit 6228771

REF: remove no-longer-needed ignore_failures (#49659)
1 parent c60135c commit 6228771


5 files changed: +37 additions, -104 deletions


pandas/core/frame.py

Lines changed: 1 addition & 1 deletion
@@ -10463,7 +10463,7 @@ def _get_data() -> DataFrame:
 
         # After possibly _get_data and transposing, we are now in the
         # simple case where we can use BlockManager.reduce
-        res, _ = df._mgr.reduce(blk_func, ignore_failures=False)
+        res = df._mgr.reduce(blk_func)
         out = df._constructor(res).iloc[0]
         if out_dtype is not None:
             out = out.astype(out_dtype)
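
The caller-side change here is mechanical (the `(manager, indexer)` tuple becomes a single manager), but it rests on the premise that reductions no longer silently skip columns they cannot handle, so there is never an indexer to map back. A hedged sketch of the user-visible behaviour this relies on, assuming a pandas build that already enforces the nuisance-column deprecation:

import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3], "b": ["x", "y", "z"]})

# With the silent column-dropping path gone, reducing over an object column
# raises instead of being quietly skipped ...
try:
    df.mean()
except TypeError as err:
    print(f"TypeError: {err}")

# ... and restricting the reduction to numeric data is an explicit choice:
print(df.mean(numeric_only=True))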

pandas/core/internals/array_manager.py

Lines changed: 22 additions & 52 deletions
@@ -196,7 +196,6 @@ def apply(
         self: T,
         f,
         align_keys: list[str] | None = None,
-        ignore_failures: bool = False,
         **kwargs,
     ) -> T:
         """
@@ -207,19 +206,17 @@ def apply(
         f : str or callable
             Name of the Array method to apply.
         align_keys: List[str] or None, default None
-        ignore_failures: bool, default False
         **kwargs
             Keywords to pass to `f`
 
         Returns
         -------
         ArrayManager
         """
-        assert "filter" not in kwargs
+        assert "filter" not in kwargs and "ignore_failures" not in kwargs
 
         align_keys = align_keys or []
         result_arrays: list[np.ndarray] = []
-        result_indices: list[int] = []
         # fillna: Series/DataFrame is responsible for making sure value is aligned
 
         aligned_args = {k: kwargs[k] for k in align_keys}
@@ -243,27 +240,17 @@ def apply(
                         # otherwise we have an array-like
                         kwargs[k] = obj[i]
 
-            try:
-                if callable(f):
-                    applied = f(arr, **kwargs)
-                else:
-                    applied = getattr(arr, f)(**kwargs)
-            except (TypeError, NotImplementedError):
-                if not ignore_failures:
-                    raise
-                continue
+            if callable(f):
+                applied = f(arr, **kwargs)
+            else:
+                applied = getattr(arr, f)(**kwargs)
+
             # if not isinstance(applied, ExtensionArray):
             #     # TODO not all EA operations return new EAs (eg astype)
             #     applied = array(applied)
             result_arrays.append(applied)
-            result_indices.append(i)
 
-        new_axes: list[Index]
-        if ignore_failures:
-            # TODO copy?
-            new_axes = [self._axes[0], self._axes[1][result_indices]]
-        else:
-            new_axes = self._axes
+        new_axes = self._axes
 
         # error: Argument 1 to "ArrayManager" has incompatible type "List[ndarray]";
         # expected "List[Union[ndarray, ExtensionArray]]"
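
To make the removed branch concrete: ignore_failures=True used to turn a per-column TypeError/NotImplementedError into "skip this column and remember which ones survived", which is exactly the bookkeeping (result_indices, the rebuilt axes) that disappears above. A standalone sketch of the old pattern versus the new straight-through call; this is generic illustration, not the ArrayManager code itself:

import numpy as np

arrays = [np.array([1, 2, 3]), np.array(["a", "b", "c"], dtype=object)]

def old_style_apply(func, ignore_failures=False):
    # The removed pattern: swallow per-column failures, track the survivors.
    results, kept = [], []
    for i, arr in enumerate(arrays):
        try:
            results.append(func(arr))
        except (TypeError, NotImplementedError):
            if not ignore_failures:
                raise
            continue  # silently drop this column
        kept.append(i)
    return results, kept

def new_style_apply(func):
    # The simplified pattern: every column is kept, failures propagate.
    return [func(arr) for arr in arrays]

print(old_style_apply(np.sum, ignore_failures=True))  # object column dropped, survivors tracked
print(new_style_apply(lambda arr: arr[::-1]))         # all columns come back, no bookkeeping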
@@ -985,58 +972,41 @@ def grouped_reduce(self: T, func: Callable, ignore_failures: bool = False) -> T:
         # expected "List[Union[ndarray, ExtensionArray]]"
         return type(self)(result_arrays, [index, columns])  # type: ignore[arg-type]
 
-    def reduce(
-        self: T, func: Callable, ignore_failures: bool = False
-    ) -> tuple[T, np.ndarray]:
+    def reduce(self: T, func: Callable) -> T:
         """
         Apply reduction function column-wise, returning a single-row ArrayManager.
 
         Parameters
         ----------
         func : reduction function
-        ignore_failures : bool, default False
-            Whether to drop columns where func raises TypeError.
 
         Returns
         -------
         ArrayManager
-        np.ndarray
-            Indexer of column indices that are retained.
         """
         result_arrays: list[np.ndarray] = []
-        result_indices: list[int] = []
         for i, arr in enumerate(self.arrays):
-            try:
-                res = func(arr, axis=0)
-            except TypeError:
-                if not ignore_failures:
-                    raise
+            res = func(arr, axis=0)
+
+            # TODO NaT doesn't preserve dtype, so we need to ensure to create
+            # a timedelta result array if original was timedelta
+            # what if datetime results in timedelta? (eg std)
+            if res is NaT and is_timedelta64_ns_dtype(arr.dtype):
+                result_arrays.append(np.array(["NaT"], dtype="timedelta64[ns]"))
             else:
-                # TODO NaT doesn't preserve dtype, so we need to ensure to create
-                # a timedelta result array if original was timedelta
-                # what if datetime results in timedelta? (eg std)
-                if res is NaT and is_timedelta64_ns_dtype(arr.dtype):
-                    result_arrays.append(np.array(["NaT"], dtype="timedelta64[ns]"))
-                else:
-                    # error: Argument 1 to "append" of "list" has incompatible type
-                    # "ExtensionArray"; expected "ndarray"
-                    result_arrays.append(
-                        sanitize_array([res], None)  # type: ignore[arg-type]
-                    )
-                result_indices.append(i)
+                # error: Argument 1 to "append" of "list" has incompatible type
+                # "ExtensionArray"; expected "ndarray"
+                result_arrays.append(
+                    sanitize_array([res], None)  # type: ignore[arg-type]
+                )
 
         index = Index._simple_new(np.array([None], dtype=object))  # placeholder
-        if ignore_failures:
-            indexer = np.array(result_indices)
-            columns = self.items[result_indices]
-        else:
-            indexer = np.arange(self.shape[0])
-            columns = self.items
+        columns = self.items
 
         # error: Argument 1 to "ArrayManager" has incompatible type "List[ndarray]";
         # expected "List[Union[ndarray, ExtensionArray]]"
        new_mgr = type(self)(result_arrays, [index, columns])  # type: ignore[arg-type]
-        return new_mgr, indexer
+        return new_mgr
 
     def operate_blockwise(self, other: ArrayManager, array_op) -> ArrayManager:
         """

pandas/core/internals/base.py

Lines changed: 0 additions & 1 deletion
@@ -134,7 +134,6 @@ def apply(
         self: T,
         f,
         align_keys: list[str] | None = None,
-        ignore_failures: bool = False,
         **kwargs,
     ) -> T:
         raise AbstractMethodError(self)

pandas/core/internals/blocks.py

Lines changed: 5 additions & 16 deletions
@@ -326,17 +326,12 @@ def apply(self, func, **kwargs) -> list[Block]:
 
         return self._split_op_result(result)
 
-    def reduce(self, func, ignore_failures: bool = False) -> list[Block]:
+    def reduce(self, func) -> list[Block]:
         # We will apply the function and reshape the result into a single-row
         # Block with the same mgr_locs; squeezing will be done at a higher level
         assert self.ndim == 2
 
-        try:
-            result = func(self.values)
-        except (TypeError, NotImplementedError):
-            if ignore_failures:
-                return []
-            raise
+        result = func(self.values)
 
         if self.values.ndim == 1:
             # TODO(EA2D): special case not needed with 2D EAs
@@ -1957,23 +1952,17 @@ class ObjectBlock(NumpyBlock):
     __slots__ = ()
     is_object = True
 
-    @maybe_split
-    def reduce(self, func, ignore_failures: bool = False) -> list[Block]:
+    def reduce(self, func) -> list[Block]:
         """
         For object-dtype, we operate column-wise.
         """
         assert self.ndim == 2
 
-        try:
-            res = func(self.values)
-        except TypeError:
-            if not ignore_failures:
-                raise
-            return []
+        res = func(self.values)
 
         assert isinstance(res, np.ndarray)
         assert res.ndim == 1
-        res = res.reshape(1, -1)
+        res = res.reshape(-1, 1)
         return [self.make_block_same_class(res)]
 
     @maybe_split
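
One detail in this hunk that is not just try/except removal is the reshape. Block values are laid out transposed relative to the frame (shape (n_columns, n_rows)), and reading the hunk, with the @maybe_split decorator gone the reduction result presumably has one entry per column rather than always length one, so it has to come back as a column vector. A plain-NumPy illustration of the two shapes; this is just the array geometry, not pandas internals:

import numpy as np

res = np.array([1.0, 2.0, 3.0])   # one reduced value per column of the block

print(res.reshape(1, -1).shape)   # (1, 3) -- a single row
print(res.reshape(-1, 1).shape)   # (3, 1) -- a single column; with block values
                                  # stored as (n_columns, n_rows), this is the
                                  # shape of a one-row result block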

pandas/core/internals/managers.py

Lines changed: 9 additions & 34 deletions
@@ -303,7 +303,6 @@ def apply(
         self: T,
         f,
         align_keys: list[str] | None = None,
-        ignore_failures: bool = False,
         **kwargs,
     ) -> T:
         """
@@ -314,15 +313,14 @@ def apply(
         f : str or callable
             Name of the Block method to apply.
         align_keys: List[str] or None, default None
-        ignore_failures: bool, default False
         **kwargs
             Keywords to pass to `f`
 
         Returns
         -------
         BlockManager
         """
-        assert "filter" not in kwargs
+        assert "filter" not in kwargs and "ignore_failures" not in kwargs
 
         align_keys = align_keys or []
         result_blocks: list[Block] = []
@@ -346,20 +344,12 @@ def apply(
                         # otherwise we have an ndarray
                         kwargs[k] = obj[b.mgr_locs.indexer]
 
-            try:
-                if callable(f):
-                    applied = b.apply(f, **kwargs)
-                else:
-                    applied = getattr(b, f)(**kwargs)
-            except (TypeError, NotImplementedError):
-                if not ignore_failures:
-                    raise
-                continue
+            if callable(f):
+                applied = b.apply(f, **kwargs)
+            else:
+                applied = getattr(b, f)(**kwargs)
             result_blocks = extend_blocks(applied, result_blocks)
 
-        if ignore_failures:
-            return self._combine(result_blocks)
-
         out = type(self).from_blocks(result_blocks, self.axes)
         return out
 
@@ -1527,44 +1517,29 @@ def grouped_reduce(self: T, func: Callable, ignore_failures: bool = False) -> T:
 
         return type(self).from_blocks(result_blocks, [self.axes[0], index])
 
-    def reduce(
-        self: T, func: Callable, ignore_failures: bool = False
-    ) -> tuple[T, np.ndarray]:
+    def reduce(self: T, func: Callable) -> T:
         """
         Apply reduction function blockwise, returning a single-row BlockManager.
 
         Parameters
         ----------
         func : reduction function
-        ignore_failures : bool, default False
-            Whether to drop blocks where func raises TypeError.
 
         Returns
         -------
         BlockManager
-        np.ndarray
-            Indexer of mgr_locs that are retained.
         """
         # If 2D, we assume that we're operating column-wise
         assert self.ndim == 2
 
         res_blocks: list[Block] = []
         for blk in self.blocks:
-            nbs = blk.reduce(func, ignore_failures)
+            nbs = blk.reduce(func)
             res_blocks.extend(nbs)
 
         index = Index([None])  # placeholder
-        if ignore_failures:
-            if res_blocks:
-                indexer = np.concatenate([blk.mgr_locs.as_array for blk in res_blocks])
-                new_mgr = self._combine(res_blocks, copy=False, index=index)
-            else:
-                indexer = []
-                new_mgr = type(self).from_blocks([], [self.items[:0], index])
-        else:
-            indexer = np.arange(self.shape[0])
-            new_mgr = type(self).from_blocks(res_blocks, [self.items, index])
-        return new_mgr, indexer
+        new_mgr = type(self).from_blocks(res_blocks, [self.items, index])
+        return new_mgr
 
     def operate_blockwise(self, other: BlockManager, array_op) -> BlockManager:
         """
