ENH: fix a bunch of pyarrow duration xfails #50669

Merged · 2 commits · Jan 13, 2023
22 changes: 22 additions & 0 deletions pandas/core/arrays/arrow/array.py
@@ -653,6 +653,15 @@ def factorize(
        use_na_sentinel: bool = True,
    ) -> tuple[np.ndarray, ExtensionArray]:
        null_encoding = "mask" if use_na_sentinel else "encode"

        pa_type = self._data.type
Member:

Similar to #50688 (comment), could you see if going through cast is more performant here?

Member Author:

import pandas as pd
import pyarrow as pa

parr = pa.array(range(10000), type=pa.duration("s"))
arr = pd.core.arrays.ArrowExtensionArray(parr)

%timeit arr.factorize()
377 µs ± 17.6 µs per loop (mean ± std. dev. of 7 runs, 1,000 loops each)  # <- astype
335 µs ± 2.92 µs per loop (mean ± std. dev. of 7 runs, 1,000 loops each)  # <- cast
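
For reference, a rough sketch of what the cast variant does at the pyarrow level (illustrative only, not the code in this diff):

import pyarrow as pa

parr = pa.array(range(10_000), type=pa.duration("s"))

# dictionary_encode has no duration kernel, so reinterpret the values as
# int64, encode, then cast the unique values back to duration at the end.
encoded = parr.cast(pa.int64()).dictionary_encode()
indices = encoded.indices  # the factorize codes
uniques = encoded.dictionary.cast(pa.duration("s"))  # original type restored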

Member Author:

So ~10% more performant, at a similar complexity cost.

Member Author:

If this is a blocker I'll change it on the affected PRs. I still like this pattern marginally more than the alternative, but mainly I want to get the slow xfails out of my workflow.

Member:

I would prefer going through cast (and generally keep ops in pyarrow-land as much as possible). If you could adjust this in #50688 that'd be good.

        if pa.types.is_duration(pa_type):
            # https://github.com/apache/arrow/issues/15226#issuecomment-1376578323
            arr = cast(ArrowExtensionArray, self.astype("int64[pyarrow]"))
            indices, uniques = arr.factorize(use_na_sentinel=use_na_sentinel)
            uniques = uniques.astype(self.dtype)
            return indices, uniques

        encoded = self._data.dictionary_encode(null_encoding=null_encoding)
        if encoded.length() == 0:
            indices = np.array([], dtype=np.intp)
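
With this in place, factorizing a duration-typed array round-trips through int64 instead of raising. A quick check on a patched build (same constructor path as the benchmark above):

import pandas as pd
import pyarrow as pa

arr = pd.core.arrays.ArrowExtensionArray(
    pa.array([1, 1, 2, None], type=pa.duration("s"))
)
indices, uniques = arr.factorize()
# indices -> [0, 0, 1, -1] (NA sentinel); uniques keeps the duration dtype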
@@ -849,6 +858,12 @@ def unique(self: ArrowExtensionArrayT) -> ArrowExtensionArrayT:
        -------
        ArrowExtensionArray
        """
        if pa.types.is_duration(self._data.type):
            # https://github.com/apache/arrow/issues/15226#issuecomment-1376578323
            arr = cast(ArrowExtensionArrayT, self.astype("int64[pyarrow]"))
            result = arr.unique()
            return cast(ArrowExtensionArrayT, result.astype(self.dtype))

        return type(self)(pc.unique(self._data))

    def value_counts(self, dropna: bool = True) -> Series:
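
The guard in unique exists because the direct kernel call still raises. A minimal reproduction of the underlying pyarrow gap, plus the int64 round-trip used above (names illustrative):

import pyarrow as pa
import pyarrow.compute as pc

durations = pa.chunked_array([pa.array([1, 2, 2, None], type=pa.duration("s"))])

try:
    pc.unique(durations)  # no duration kernel as of the linked arrow issue
except pa.ArrowNotImplementedError as err:
    print(err)

# the workaround: view as int64, dedupe, cast back
uniques = pc.unique(durations.cast(pa.int64())).cast(pa.duration("s"))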
@@ -868,6 +883,13 @@ def value_counts(self, dropna: bool = True) -> Series:
        --------
        Series.value_counts
        """
        if pa.types.is_duration(self._data.type):
            # https://github.com/apache/arrow/issues/15226#issuecomment-1376578323
            arr = cast(ArrowExtensionArray, self.astype("int64[pyarrow]"))
            result = arr.value_counts()
            result.index = result.index.astype(self.dtype)
            return result

        from pandas import (
            Index,
            Series,
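
value_counts uses the same trick, with one extra step: the counts are computed on the int64 view and only the resulting index is cast back. On a patched build this behaves roughly like:

import pandas as pd
import pyarrow as pa

arr = pd.core.arrays.ArrowExtensionArray(
    pa.array([1, 1, 2, None], type=pa.duration("s"))
)
counts = arr.value_counts(dropna=True)
# a Series of int64 counts whose index has the duration[s][pyarrow] dtype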
111 changes: 1 addition & 110 deletions pandas/tests/extension/test_arrow.py
@@ -502,13 +502,6 @@ def test_groupby_extension_no_sort(self, data_for_grouping, request):
                    reason=f"{pa_dtype} only has 2 unique possible values",
                )
            )
        elif pa.types.is_duration(pa_dtype):
            request.node.add_marker(
                pytest.mark.xfail(
                    raises=pa.ArrowNotImplementedError,
                    reason=f"pyarrow doesn't support factorizing {pa_dtype}",
                )
            )
        super().test_groupby_extension_no_sort(data_for_grouping)

    def test_groupby_extension_transform(self, data_for_grouping, request):
@@ -519,13 +512,6 @@ def test_groupby_extension_transform(self, data_for_grouping, request):
                    reason=f"{pa_dtype} only has 2 unique possible values",
                )
            )
        elif pa.types.is_duration(pa_dtype):
            request.node.add_marker(
                pytest.mark.xfail(
                    raises=pa.ArrowNotImplementedError,
                    reason=f"pyarrow doesn't support factorizing {pa_dtype}",
                )
            )
        with tm.maybe_produces_warning(
            PerformanceWarning, pa_version_under7p0, check_stacklevel=False
        ):
@@ -534,14 +520,6 @@
    def test_groupby_extension_apply(
        self, data_for_grouping, groupby_apply_op, request
    ):
        pa_dtype = data_for_grouping.dtype.pyarrow_dtype
        if pa.types.is_duration(pa_dtype):
            request.node.add_marker(
                pytest.mark.xfail(
                    raises=pa.ArrowNotImplementedError,
                    reason=f"pyarrow doesn't support factorizing {pa_dtype}",
                )
            )
        with tm.maybe_produces_warning(
            PerformanceWarning, pa_version_under7p0, check_stacklevel=False
        ):
@@ -557,13 +535,6 @@ def test_groupby_extension_agg(self, as_index, data_for_grouping, request):
                    reason=f"{pa_dtype} only has 2 unique possible values",
                )
            )
        elif pa.types.is_duration(pa_dtype):
            request.node.add_marker(
                pytest.mark.xfail(
                    raises=pa.ArrowNotImplementedError,
                    reason=f"pyarrow doesn't support factorizing {pa_dtype}",
                )
            )
        with tm.maybe_produces_warning(
            PerformanceWarning, pa_version_under7p0, check_stacklevel=False
        ):
@@ -784,25 +755,9 @@ def test_diff(self, data, periods, request):
    @pytest.mark.filterwarnings("ignore:Falling back:pandas.errors.PerformanceWarning")
    @pytest.mark.parametrize("dropna", [True, False])
    def test_value_counts(self, all_data, dropna, request):
        pa_dtype = all_data.dtype.pyarrow_dtype
        if pa.types.is_duration(pa_dtype):
            request.node.add_marker(
                pytest.mark.xfail(
                    raises=pa.ArrowNotImplementedError,
                    reason=f"value_count has no kernel for {pa_dtype}",
                )
            )
        super().test_value_counts(all_data, dropna)

    def test_value_counts_with_normalize(self, data, request):
        pa_dtype = data.dtype.pyarrow_dtype
        if pa.types.is_duration(pa_dtype):
            request.node.add_marker(
                pytest.mark.xfail(
                    raises=pa.ArrowNotImplementedError,
                    reason=f"value_count has no pyarrow kernel for {pa_dtype}",
                )
            )
        with tm.maybe_produces_warning(
            PerformanceWarning, pa_version_under7p0, check_stacklevel=False
        ):
@@ -882,17 +837,6 @@ def test_nargsort(self, data_missing_for_sorting, na_position, expected):

    @pytest.mark.parametrize("ascending", [True, False])
    def test_sort_values(self, data_for_sorting, ascending, sort_by_key, request):
        pa_dtype = data_for_sorting.dtype.pyarrow_dtype
        if pa.types.is_duration(pa_dtype) and not ascending:
            request.node.add_marker(
                pytest.mark.xfail(
                    raises=pa.ArrowNotImplementedError,
                    reason=(
                        f"unique has no pyarrow kernel "
                        f"for {pa_dtype} when ascending={ascending}"
                    ),
                )
            )
        with tm.maybe_produces_warning(
            PerformanceWarning, pa_version_under7p0, check_stacklevel=False
        ):
@@ -911,74 +855,21 @@ def test_sort_values_missing(

    @pytest.mark.parametrize("ascending", [True, False])
    def test_sort_values_frame(self, data_for_sorting, ascending, request):
        pa_dtype = data_for_sorting.dtype.pyarrow_dtype
        if pa.types.is_duration(pa_dtype):
            request.node.add_marker(
                pytest.mark.xfail(
                    raises=pa.ArrowNotImplementedError,
                    reason=(
                        f"dictionary_encode has no pyarrow kernel "
                        f"for {pa_dtype} when ascending={ascending}"
                    ),
                )
            )
        with tm.maybe_produces_warning(
            PerformanceWarning, pa_version_under7p0, check_stacklevel=False
        ):
            super().test_sort_values_frame(data_for_sorting, ascending)

    @pytest.mark.parametrize("box", [pd.Series, lambda x: x])
    @pytest.mark.parametrize("method", [lambda x: x.unique(), pd.unique])
    def test_unique(self, data, box, method, request):
        pa_dtype = data.dtype.pyarrow_dtype
        if pa.types.is_duration(pa_dtype):
            request.node.add_marker(
                pytest.mark.xfail(
                    raises=pa.ArrowNotImplementedError,
                    reason=f"unique has no pyarrow kernel for {pa_dtype}.",
                )
            )
        super().test_unique(data, box, method)

    def test_factorize(self, data_for_grouping, request):
        pa_dtype = data_for_grouping.dtype.pyarrow_dtype
        if pa.types.is_duration(pa_dtype):
            request.node.add_marker(
                pytest.mark.xfail(
                    raises=pa.ArrowNotImplementedError,
                    reason=f"dictionary_encode has no pyarrow kernel for {pa_dtype}",
                )
            )
        elif pa.types.is_boolean(pa_dtype):
        if pa.types.is_boolean(pa_dtype):
            request.node.add_marker(
                pytest.mark.xfail(
                    reason=f"{pa_dtype} only has 2 unique possible values",
                )
            )
        super().test_factorize(data_for_grouping)

    def test_factorize_equivalence(self, data_for_grouping, request):
        pa_dtype = data_for_grouping.dtype.pyarrow_dtype
        if pa.types.is_duration(pa_dtype):
            request.node.add_marker(
                pytest.mark.xfail(
                    raises=pa.ArrowNotImplementedError,
                    reason=f"dictionary_encode has no pyarrow kernel for {pa_dtype}",
                )
            )
        super().test_factorize_equivalence(data_for_grouping)

    def test_factorize_empty(self, data, request):
        pa_dtype = data.dtype.pyarrow_dtype
        if pa.types.is_duration(pa_dtype):
            request.node.add_marker(
                pytest.mark.xfail(
                    raises=pa.ArrowNotImplementedError,
                    reason=f"dictionary_encode has no pyarrow kernel for {pa_dtype}",
                )
            )
        super().test_factorize_empty(data)

    @pytest.mark.xfail(
        reason="result dtype pyarrow[bool] better than expected dtype object"
    )