alamb commented on code in PR #6703:
URL: https://github.com/apache/arrow-datafusion/pull/6703#discussion_r1238711464
##########
datafusion/expr/src/expr_fn.rs:
##########
@@ -791,6 +793,25 @@ pub fn create_udaf(
)
}
+/// Creates a new UDWF with a specific signature, state type and return type.
+///
+/// The signature and state type must match the [`PartitionEvaluator`]'s
implementation.
Review Comment:
thank you -- this is a great catch
##########
datafusion-examples/examples/simple_udwf.rs:
##########
@@ -0,0 +1,213 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+use std::sync::Arc;
+
+use arrow::{
+ array::{ArrayRef, AsArray, Float64Array},
+ datatypes::Float64Type,
+};
+use arrow_schema::DataType;
+use datafusion::datasource::file_format::options::CsvReadOptions;
+
+use datafusion::error::Result;
+use datafusion::prelude::*;
+use datafusion_common::{DataFusionError, ScalarValue};
+use datafusion_expr::{
+ PartitionEvaluator, Signature, Volatility, WindowFrame, WindowUDF,
+};
+
+// create local execution context with `cars.csv` registered as a table named
`cars`
+async fn create_context() -> Result<SessionContext> {
+ // declare a new context. In spark API, this corresponds to a new Spark
SQL session
+ let ctx = SessionContext::new();
+
+ // declare a table in memory. In spark API, this corresponds to
createDataFrame(...).
+ println!("pwd: {}", std::env::current_dir().unwrap().display());
+ let csv_path = "datafusion/core/tests/data/cars.csv".to_string();
+ let read_options = CsvReadOptions::default().has_header(true);
+
+ ctx.register_csv("cars", &csv_path, read_options).await?;
+ Ok(ctx)
+}
+
+/// In this example we will declare a user defined window function that
computes a moving average and then run it using SQL
+#[tokio::main]
+async fn main() -> Result<()> {
+ let ctx = create_context().await?;
+
+ // register the window function with DataFusion so we can call it
+ ctx.register_udwf(smooth_it());
+
+ // Use SQL to run the new window function
+ let df = ctx.sql("SELECT * from cars").await?;
+ // print the results
+ df.show().await?;
+
+ // Use SQL to run the new window function:
+ //
+ // `PARTITION BY car`:each distinct value of car (red, and green)
+ // should be treated as a separate partition (and will result in
+ // creating a new `PartitionEvaluator`)
+ //
+ // `ORDER BY time`: within each partition ('green' or 'red') the
+ // rows will be ordered by the value in the `time` column
+ //
+ // `evaluate_inside_range` is invoked with a window defined by the
+ // SQL. In this case:
+ //
+ // The first invocation will be passed row 0, the first row in the
+ // partition.
+ //
+ // The second invocation will be passed rows 0 and 1, the first
+ // two rows in the partition.
+ //
+ // etc.
+ let df = ctx
+ .sql(
+ "SELECT \
+ car, \
+ speed, \
+ smooth_it(speed) OVER (PARTITION BY car ORDER BY time),\
+ time \
+ from cars \
+ ORDER BY \
+ car",
+ )
+ .await?;
+ // print the results
+ df.show().await?;
+
+ // this time, call the new window function with an explicit
+ // window so evaluate will be invoked with each window.
+ //
+ // `ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING`: each invocation
+ // sees at most 3 rows: the row before, the current row, and the
+ // row afterward.
+ let df = ctx.sql(
+ "SELECT \
+ car, \
+ speed, \
+ smooth_it(speed) OVER (PARTITION BY car ORDER BY time ROWS BETWEEN
1 PRECEDING AND 1 FOLLOWING),\
+ time \
+ from cars \
+ ORDER BY \
+ car",
+ ).await?;
+ // print the results
+ df.show().await?;
+
+ // Now, run the function using the DataFrame API:
+ let window_expr = smooth_it().call(
+ vec![col("speed")], // smooth_it(speed)
+ vec![col("car")], // PARTITION BY car
+ vec![col("time").sort(true, true)], // ORDER BY time ASC
+ WindowFrame::new(false),
+ );
+ let df = ctx.table("cars").await?.window(vec![window_expr])?;
+
+ // print the results
+ df.show().await?;
+
+ Ok(())
+}
+
+fn smooth_it() -> WindowUDF {
+ WindowUDF {
+ name: String::from("smooth_it"),
+ // it will take one argument -- the column to smooth
+ signature: Signature::exact(vec![DataType::Int32],
Volatility::Immutable),
Review Comment:
I thought it was the default type that the CSV parser picked, but apparently
not:
```sql
❯ describe './datafusion/core/tests/data/cars.csv';
describe './datafusion/core/tests/data/cars.csv';
+-------------+-----------------------------+-------------+
| column_name | data_type | is_nullable |
+-------------+-----------------------------+-------------+
| car | Utf8 | YES |
| speed | Float64 | YES |
| time | Timestamp(Nanosecond, None) | YES |
+-------------+-----------------------------+-------------+
3 rows in set. Query took 0.031 seconds.
```
I'll update this to take a Float instead.
##########
datafusion/core/src/dataframe.rs:
##########
@@ -218,6 +218,14 @@ impl DataFrame {
Ok(DataFrame::new(self.session_state, plan))
}
+ /// Apply one or more window functions ([`Expr::WindowFunction`]) to
extend the schema
+ pub fn window(self, window_exprs: Vec<Expr>) -> Result<DataFrame> {
Review Comment:
Good call -- I tried to make one, and it turns out to be non trivial. I will
do it in a follow on PR
##########
datafusion/execution/src/task.rs:
##########
@@ -92,13 +100,15 @@ impl TaskContext {
config.set(&k, &v)?;
}
let session_config = SessionConfig::from(config);
+ let window_functions = HashMap::new();
Review Comment:
Yes, exactly. I also tried to lead people to `SessionContext::task_ctx` with
another doc comment above.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]