@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES.
+# SPDX-FileCopyrightText: Copyright (c) 2024-2025 NVIDIA CORPORATION & AFFILIATES.
 # SPDX-License-Identifier: Apache-2.0
 # TODO: remove need for this
 # ruff: noqa: D101
@@ -8,24 +8,125 @@
 
 from typing import TYPE_CHECKING, Any
 
-from cudf_polars.dsl.expressions.base import Expr
+import pylibcudf as plc
+
+from cudf_polars.containers import Column
+from cudf_polars.dsl import expr
+from cudf_polars.dsl.expressions.base import ExecutionContext, Expr
+from cudf_polars.dsl.utils.windows import range_window_bounds
 
 if TYPE_CHECKING:
-    import pylibcudf as plc
+    import pyarrow as pa
+
+    from cudf_polars.containers import DataFrame
+    from cudf_polars.typing import ClosedInterval
+
+__all__ = ["GroupedRollingWindow", "RollingWindow", "to_request"]
+
+
+def to_request(
+    value: expr.Expr, orderby: Column, df: DataFrame
+) -> plc.rolling.RollingRequest:
+    """
+    Produce a rolling request for evaluation with pylibcudf.
 
-__all__ = ["GroupedRollingWindow", "RollingWindow"]
+    Parameters
+    ----------
+    value
+        The expression to perform the rolling aggregation on.
+    orderby
+        Orderby column, used as input to the request when the aggregation is Len.
+    df
+        DataFrame used to evaluate the inputs to the aggregation.
+    """
+    min_periods = 1
+    if isinstance(value, expr.Len):
+        # A count aggregation, we need a column so use the orderby column
+        col = orderby
+    elif isinstance(value, expr.Agg):
+        child = value.children[0]
+        col = child.evaluate(df, context=ExecutionContext.ROLLING)
+        if value.name == "var":
+            # Polars variance produces null if nvalues <= ddof
+            # libcudf produces NaN. However, we can get the polars
+            # behaviour by setting the minimum window size to ddof +
+            # 1.
+            min_periods = value.options + 1
+    else:
+        col = value.evaluate(
+            df, context=ExecutionContext.ROLLING
+        )  # pragma: no cover; raise before we get here because we
+        # don't do correct handling of empty groups
+    return plc.rolling.RollingRequest(col.obj, min_periods, value.agg_request)
 
 
 class RollingWindow(Expr):
-    __slots__ = ("options",)
-    _non_child = ("dtype", "options")
+    __slots__ = ("closed_window", "following", "orderby", "preceding")
+    _non_child = ("dtype", "preceding", "following", "closed_window", "orderby")
 
-    def __init__(self, dtype: plc.DataType, options: Any, agg: Expr) -> None:
+    def __init__(
+        self,
+        dtype: plc.DataType,
+        preceding: pa.Scalar,
+        following: pa.Scalar,
+        closed_window: ClosedInterval,
+        orderby: str,
+        agg: Expr,
+    ) -> None:
         self.dtype = dtype
-        self.options = options
+        self.preceding = preceding
+        self.following = following
+        self.closed_window = closed_window
+        self.orderby = orderby
         self.children = (agg,)
         self.is_pointwise = False
-        raise NotImplementedError("Rolling window not implemented")
+        if agg.agg_request.kind() == plc.aggregation.Kind.COLLECT_LIST:
+            raise NotImplementedError(
+                "Incorrect handling of empty groups for list collection"
+            )
+        if not plc.rolling.is_valid_rolling_aggregation(agg.dtype, agg.agg_request):
+            raise NotImplementedError(f"Unsupported rolling aggregation {agg}")
+
+    def do_evaluate(  # noqa: D102
+        self, df: DataFrame, *, context: ExecutionContext = ExecutionContext.FRAME
+    ) -> Column:
+        if context != ExecutionContext.FRAME:
+            raise RuntimeError(
+                "Rolling aggregation inside groupby/over/rolling"
+            )  # pragma: no cover; translation raises first
+        (agg,) = self.children
+        orderby = df.column_map[self.orderby]
+        # Polars casts integral orderby to int64, but only for calculating window bounds
+        if (
+            plc.traits.is_integral(orderby.obj.type())
+            and orderby.obj.type().id() != plc.TypeId.INT64
+        ):
+            orderby_obj = plc.unary.cast(orderby.obj, plc.DataType(plc.TypeId.INT64))
+        else:
+            orderby_obj = orderby.obj
+        preceding, following = range_window_bounds(
+            self.preceding, self.following, self.closed_window
+        )
+        if orderby.obj.null_count() != 0:
+            raise RuntimeError(
+                f"Index column '{self.orderby}' in rolling may not contain nulls"
+            )
+        if not orderby.check_sorted(
+            order=plc.types.Order.ASCENDING, null_order=plc.types.NullOrder.BEFORE
+        ):
+            raise RuntimeError(
+                f"Index column '{self.orderby}' in rolling is not sorted, please sort first"
+            )
+        (result,) = plc.rolling.grouped_range_rolling_window(
+            plc.Table([]),
+            orderby_obj,
+            plc.types.Order.ASCENDING,
+            plc.types.NullOrder.BEFORE,
+            preceding,
+            following,
+            [to_request(agg, orderby, df)],
+        ).columns()
+        return Column(result)
 
 
 class GroupedRollingWindow(Expr):
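
For context, a minimal sketch of the kind of polars query this new RollingWindow expression is meant to serve: a rolling aggregation over a sorted, non-null index column, collected with the GPU engine. The frame contents and column names below are illustrative only, not taken from the change itself, and assume polars with the cudf-polars GPU engine installed.

    import polars as pl

    # Integer index column must be sorted and contain no nulls,
    # matching the checks in RollingWindow.do_evaluate above.
    q = (
        pl.LazyFrame({"t": [1, 2, 4, 7], "x": [1.0, 2.0, 3.0, 4.0]})
        .set_sorted("t")
        .rolling(index_column="t", period="3i")
        .agg(pl.col("x").sum().alias("x_sum"))
    )

    # Requires the cudf-polars GPU engine to exercise this code path.
    print(q.collect(engine="gpu"))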