
Commit c82ff3f

TST: fix pyarrow xfails for date/time dtypes (#51281)
1 parent: 18ef7f1

2 files changed, +32 −21 lines changed

pandas/_libs/missing.pyx

Lines changed: 8 additions & 0 deletions

@@ -338,6 +338,14 @@ def _create_binary_propagating_op(name, is_divmod=False):
         elif is_cmp and isinstance(other, (date, time, timedelta)):
             return NA
 
+        elif isinstance(other, date):
+            if name in ["__sub__", "__rsub__"]:
+                return NA
+
+        elif isinstance(other, timedelta):
+            if name in ["__sub__", "__rsub__", "__add__", "__radd__"]:
+                return NA
+
         return NotImplemented
 
     method.__name__ = name
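
With these new branches, arithmetic between NA and date/timedelta scalars should propagate NA instead of falling through to NotImplemented. A minimal sketch of the expected behavior, assuming a pandas build that includes this commit:

from datetime import date, timedelta

import pandas as pd

# Subtraction with a date scalar now propagates NA.
print(pd.NA - date(2023, 1, 1))    # <NA>
print(date(2023, 1, 1) - pd.NA)    # <NA>

# Addition and subtraction with a timedelta scalar also propagate NA.
print(pd.NA + timedelta(hours=1))  # <NA>
print(timedelta(hours=1) - pd.NA)  # <NA>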

pandas/tests/extension/test_arrow.py

Lines changed: 24 additions & 21 deletions

@@ -384,7 +384,7 @@ def test_accumulate_series(self, data, all_numeric_accumulations, skipna, request
             # renders the exception messages even when not showing them
             pytest.skip(f"{all_numeric_accumulations} not implemented for pyarrow < 9")
 
-        elif all_numeric_accumulations == "cumsum" and (pa.types.is_boolean(pa_type)):
+        elif all_numeric_accumulations == "cumsum" and pa.types.is_boolean(pa_type):
             request.node.add_marker(
                 pytest.mark.xfail(
                     reason=f"{all_numeric_accumulations} not implemented for {pa_type}",
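
For readers unfamiliar with the pattern used throughout this test module: an xfail marker is attached to the running test at runtime via request.node.add_marker. A minimal standalone sketch of that pattern (test name and reason are hypothetical):

import pytest

def test_example(request):
    # Attach an xfail marker dynamically, as the tests in this module do.
    mark = pytest.mark.xfail(raises=TypeError, reason="not supported for this dtype")
    request.node.add_marker(mark)
    # The test then proceeds; raising TypeError makes it report as xfailed.
    raise TypeError("simulated unsupported operation")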
@@ -859,17 +859,7 @@ def test_factorize(self, data_for_grouping, request):
 
     def test_combine_add(self, data_repeated, request):
         pa_dtype = next(data_repeated(1)).dtype.pyarrow_dtype
-        if pa.types.is_duration(pa_dtype):
-            # TODO: this fails on the scalar addition constructing 'expected'
-            #  but not in the actual 'combine' call, so may be salvage-able
-            mark = pytest.mark.xfail(
-                raises=TypeError,
-                reason=f"{pa_dtype} cannot be added to {pa_dtype}",
-            )
-            request.node.add_marker(mark)
-            super().test_combine_add(data_repeated)
-
-        elif pa.types.is_temporal(pa_dtype):
+        if pa.types.is_temporal(pa_dtype) and not pa.types.is_duration(pa_dtype):
             # analogous to datetime64, these cannot be added
             orig_data1, orig_data2 = data_repeated(2)
             s1 = pd.Series(orig_data1)
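
The comment's datetime64 analogy: datetime-like values cannot be added to each other, while duration/timedelta-like values can. A quick illustration with NumPy (not the pyarrow-backed path itself):

import numpy as np

ts = np.array(["2023-01-01"], dtype="datetime64[ns]")
try:
    ts + ts  # adding datetimes to each other is not defined
except TypeError as err:
    print("datetime64 + datetime64 raises:", type(err).__name__)

td = np.array([1], dtype="timedelta64[ns]")
print(td + td)  # timedeltas add fine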
@@ -915,14 +905,24 @@ def _patch_combine(self, obj, other, op):
         pa_expected = pa.array(expected_data._values)
 
         if pa.types.is_duration(pa_expected.type):
-            # pyarrow sees sequence of datetime/timedelta objects and defaults
-            #  to "us" but the non-pointwise op retains unit
-            unit = original_dtype.pyarrow_dtype.unit
-            if type(other) in [datetime, timedelta] and unit in ["s", "ms"]:
-                # pydatetime/pytimedelta objects have microsecond reso, so we
-                #  take the higher reso of the original and microsecond. Note
-                #  this matches what we would do with DatetimeArray/TimedeltaArray
-                unit = "us"
+            orig_pa_type = original_dtype.pyarrow_dtype
+            if pa.types.is_date(orig_pa_type):
+                if pa.types.is_date64(orig_pa_type):
+                    # TODO: why is this different vs date32?
+                    unit = "ms"
+                else:
+                    unit = "s"
+            else:
+                # pyarrow sees sequence of datetime/timedelta objects and defaults
+                #  to "us" but the non-pointwise op retains unit
+                # timestamp or duration
+                unit = orig_pa_type.unit
+                if type(other) in [datetime, timedelta] and unit in ["s", "ms"]:
+                    # pydatetime/pytimedelta objects have microsecond reso, so we
+                    #  take the higher reso of the original and microsecond. Note
+                    #  this matches what we would do with DatetimeArray/TimedeltaArray
+                    unit = "us"
+
             pa_expected = pa_expected.cast(f"duration[{unit}]")
         else:
             pa_expected = pa_expected.cast(original_dtype.pyarrow_dtype)
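
The branch above picks the duration unit the pointwise result is expected to have. A standalone sketch of that decision, with the helper name _expected_duration_unit introduced here purely for illustration:

from datetime import datetime, timedelta

import pyarrow as pa

def _expected_duration_unit(orig_pa_type, other):
    # Hypothetical helper mirroring the logic in _patch_combine above.
    if pa.types.is_date(orig_pa_type):
        # date64 is millisecond-based; date32 results are cast to seconds.
        return "ms" if pa.types.is_date64(orig_pa_type) else "s"
    # timestamp or duration: keep the original unit, but bump coarse units to
    # microseconds when combined with stdlib datetime/timedelta objects,
    # which carry microsecond resolution.
    unit = orig_pa_type.unit
    if type(other) in [datetime, timedelta] and unit in ["s", "ms"]:
        unit = "us"
    return unit

print(_expected_duration_unit(pa.date64(), None))                  # ms
print(_expected_duration_unit(pa.timestamp("s"), datetime.now()))  # us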
@@ -979,7 +979,7 @@ def _get_arith_xfail_marker(self, opname, pa_dtype):
                     f"for {pa_dtype}"
                 )
             )
-        elif arrow_temporal_supported:
+        elif arrow_temporal_supported and pa.types.is_time(pa_dtype):
             mark = pytest.mark.xfail(
                 raises=TypeError,
                 reason=(
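
The tightened condition applies this TypeError xfail only to time dtypes rather than to every temporal dtype the kernel supports. For reference, pa.types.is_time matches only time32/time64:

import pyarrow as pa

print(pa.types.is_time(pa.time64("us")))     # True
print(pa.types.is_time(pa.time32("s")))      # True
print(pa.types.is_time(pa.timestamp("us")))  # False
print(pa.types.is_time(pa.date32()))         # False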
@@ -1024,6 +1024,7 @@ def test_arith_series_with_scalar(
             )
             or pa.types.is_duration(pa_dtype)
             or pa.types.is_timestamp(pa_dtype)
+            or pa.types.is_date(pa_dtype)
         ):
             # BaseOpsUtil._combine always returns int64, while ArrowExtensionArray does
             # not upcast
@@ -1055,6 +1056,7 @@ def test_arith_frame_with_scalar(
             )
             or pa.types.is_duration(pa_dtype)
             or pa.types.is_timestamp(pa_dtype)
+            or pa.types.is_date(pa_dtype)
         ):
             # BaseOpsUtil._combine always returns int64, while ArrowExtensionArray does
             # not upcast
@@ -1107,6 +1109,7 @@ def test_arith_series_with_array(
             )
             or pa.types.is_duration(pa_dtype)
             or pa.types.is_timestamp(pa_dtype)
+            or pa.types.is_date(pa_dtype)
         ):
             monkeypatch.setattr(TestBaseArithmeticOps, "_combine", self._patch_combine)
         self.check_opname(ser, op_name, other, exc=self.series_array_exc)
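
These three hunks add pa.types.is_date(pa_dtype) to the conditions that route date dtypes through the patched _combine. The single predicate covers both date variants, so no separate date32/date64 clauses are needed:

import pyarrow as pa

print(pa.types.is_date(pa.date32()))         # True  (day resolution)
print(pa.types.is_date(pa.date64()))         # True  (millisecond resolution)
print(pa.types.is_date(pa.timestamp("us")))  # False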
