Merge pull request #11839 from rapidsai/branch-22.10
[gpuCI] Forward-merge branch-22.10 to branch-22.12 [skip gpuci]
GPUtester authored Sep 30, 2022
2 parents cb81ebc + 3f9b3fe · commit 71167d7
Showing 1 changed file with 8 additions and 7 deletions.
python/dask_cudf/dask_cudf/groupby.py: 15 changes (8 additions & 7 deletions)
@@ -492,7 +492,6 @@ def _shuffle_aggregate(
    chunked = ddf.map_partitions(
        chunk,
        meta=chunk(ddf._meta, **chunk_kwargs),
-        enforce_metadata=False,
        token=chunk_name,
        **chunk_kwargs,
    )
@@ -514,7 +513,6 @@ def _shuffle_aggregate(
            .map_partitions(
                aggregate,
                meta=aggregate(chunked._meta, **aggregate_kwargs),
-                enforce_metadata=False,
                **aggregate_kwargs,
            )
        )
@@ -528,7 +526,6 @@ def _shuffle_aggregate(
        ).map_partitions(
            aggregate,
            meta=aggregate(chunked._meta, **aggregate_kwargs),
-            enforce_metadata=False,
            **aggregate_kwargs,
        )

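The three hunks above remove enforce_metadata=False from the map_partitions calls in _shuffle_aggregate, so dask's default metadata enforcement (enforce_metadata=True) applies again and each partition's output is checked against meta at runtime. A minimal sketch of what that flag controls, assuming a toy pandas frame and a swap_columns helper that are not part of this PR:

# Hedged sketch of dask's enforce_metadata flag; the data and the
# swap_columns helper are illustrative assumptions, not dask_cudf code.
import pandas as pd
import dask.dataframe as dd

pdf = pd.DataFrame({"a": [1, 2, 3, 4], "b": [10, 20, 30, 40]})
ddf = dd.from_pandas(pdf, npartitions=2)

def swap_columns(df):
    # Same data, but the column order no longer matches the declared meta.
    return df[["b", "a"]]

meta = pdf.iloc[:0]  # empty frame with columns ['a', 'b']

# enforce_metadata=False: each partition's output is passed through as-is,
# so the swapped column order can drift silently away from meta.
loose = ddf.map_partitions(swap_columns, meta=meta, enforce_metadata=False)

# Default (enforce_metadata=True): dask checks every partition's output
# against meta at compute time, so a column-order mismatch is caught
# (or corrected) rather than propagated.
strict = ddf.map_partitions(swap_columns, meta=meta)

Depending on the dask version, computing strict either raises on or realigns the mismatched columns, while loose lets them through unchecked; restoring the default makes ordering inconsistencies such as the per-partition ordering addressed in the hunks below visible rather than silent.
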
@@ -809,8 +806,10 @@ def _groupby_partition_agg(df, gb_cols, aggs, columns, dropna, sort, sep):
    gb = df.groupby(gb_cols, dropna=dropna, as_index=False, sort=sort).agg(
        _agg_dict
    )
-    gb.columns = [_make_name(name, sep=sep) for name in gb.columns]
-    return gb
+    output_columns = [_make_name(name, sep=sep) for name in gb.columns]
+    gb.columns = output_columns
+    # Return with deterministic column ordering
+    return gb[sorted(output_columns)]


@_dask_cudf_nvtx_annotate
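
The hunk above makes the per-partition chunk output of the groupby aggregation deterministic: the flattened aggregation column names are sorted before the frame is returned, so every partition agrees with the meta on column order no matter how the aggregation spec was assembled. A hedged, pandas-only illustration of the idea; partition_agg, the toy frame, and the agg specs below are assumptions standing in for _groupby_partition_agg and _make_name:

# Pandas-only sketch (not dask_cudf code): sorting the flattened names gives
# every partition the same column order, whatever order the agg spec used.
import pandas as pd

def partition_agg(df, agg_spec, sep="__"):
    gb = df.groupby("key", as_index=False).agg(agg_spec)
    # Flatten ('col', 'agg') tuples into 'col__agg' names, dropping empty parts.
    output_columns = [
        sep.join(part for part in name if part) if isinstance(name, tuple) else name
        for name in gb.columns
    ]
    gb.columns = output_columns
    # Deterministic column ordering, as in the diff above.
    return gb[sorted(output_columns)]

df = pd.DataFrame({"key": [1, 1, 2], "x": [1.0, 2.0, 3.0], "y": [4, 5, 6]})

# The same logical aggregation declared in two different orders.
out_a = partition_agg(df, {"x": ["sum"], "y": ["max"]})
out_b = partition_agg(df, {"y": ["max"], "x": ["sum"]})
assert list(out_a.columns) == list(out_b.columns)  # ['key', 'x__sum', 'y__max']
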
@@ -841,11 +840,13 @@ def _tree_node_agg(df, gb_cols, dropna, sort, sep):
    )

    # Don't include the last aggregation in the column names
-    gb.columns = [
+    output_columns = [
        _make_name(name[:-1] if isinstance(name, tuple) else name, sep=sep)
        for name in gb.columns
    ]
-    return gb
+    gb.columns = output_columns
+    # Return with deterministic column ordering
+    return gb[sorted(output_columns)]


@_dask_cudf_nvtx_annotate
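
The final hunk applies the same deterministic ordering to the tree-reduction step. There, each column name produced by the second-stage aggregation is a tuple whose last element is the reduction applied at that node, so name[:-1] strips it before the names are flattened and sorted. A small stand-alone illustration; the sample tuples are made up:

# Hedged illustration of the name[:-1] handling above; the node_columns
# tuples are invented examples, not values taken from dask_cudf.
sep = "__"
node_columns = [("x__sum", "sum"), ("y__max", "max"), ("key", "")]

output_columns = [
    sep.join(part for part in name[:-1] if part) if isinstance(name, tuple) else name
    for name in node_columns
]
print(sorted(output_columns))  # ['key', 'x__sum', 'y__max']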
