Skip to content

Commit

Permalink
Fix Series.groupby.shift with a MultiIndex (#15098)
Browse files Browse the repository at this point in the history
closes #15087
closes #11259

(The typing annotation is incorrect, but I guess there needs to be a check somewhere to make `_copy_type_metadata` stricter)

Authors:
  - Matthew Roeschke (https://github.com/mroeschke)

Approvers:
  - Bradley Dice (https://github.com/bdice)

URL: #15098
  • Loading branch information
mroeschke authored Feb 21, 2024
1 parent d053323 commit 4948aa2
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 2 deletions.
3 changes: 2 additions & 1 deletion python/cudf/cudf/core/multiindex.py
Original file line number Diff line number Diff line change
Expand Up @@ -2037,7 +2037,8 @@ def _copy_type_metadata(
self: MultiIndex, other: MultiIndex, *, override_dtypes=None
) -> MultiIndex:
res = super()._copy_type_metadata(other)
res._names = other._names
if isinstance(other, MultiIndex):
res._names = other._names
return res

@_cudf_nvtx_annotate
Expand Down
11 changes: 10 additions & 1 deletion python/cudf/cudf/tests/test_groupby.py
Original file line number Diff line number Diff line change
Expand Up @@ -3308,7 +3308,6 @@ def test_groupby_pct_change(data, gkey, periods, fill_method):
assert_eq(expected, actual)


@pytest.mark.xfail(reason="https://github.com/rapidsai/cudf/issues/11259")
@pytest.mark.parametrize("periods", [-5, 5])
def test_groupby_pct_change_multiindex_dataframe(periods):
gdf = cudf.DataFrame(
Expand Down Expand Up @@ -3812,3 +3811,13 @@ def test_groupby_internal_groups_empty(gdf):
gb = gdf.groupby("y")._groupby
_, _, grouped_vals = gb.groups([])
assert grouped_vals == []


def test_groupby_shift_series_multiindex():
    """Regression test: Series.groupby(...).shift must round-trip a MultiIndex.

    Compares the cudf result against the equivalent pandas computation.
    """
    multi_idx = cudf.MultiIndex.from_tuples(
        [("a", 1), ("a", 2), ("b", 1), ("b", 2)], names=["f", "s"]
    )
    gser = Series(range(4), index=multi_idx)
    # Compute the pandas reference first, then the cudf result under test.
    expected = gser.to_pandas().groupby(level=0).shift(1)
    result = gser.groupby(level=0).shift(1)
    assert_eq(expected, result)

0 comments on commit 4948aa2

Please sign in to comment.