westonpace commented on code in PR #12590:
URL: https://github.com/apache/arrow/pull/12590#discussion_r861431218
##########
cpp/src/arrow/compute/function.h:
##########
@@ -392,7 +392,7 @@ class ARROW_EXPORT MetaFunction : public Function {
const FunctionOptions* options,
ExecContext* ctx) const = 0;
- MetaFunction(std::string name, const Arity& arity, const FunctionDoc* doc,
+ MetaFunction(std::string name, const Arity& arity, FunctionDoc doc,
const FunctionOptions* default_options = NULLPTR)
: Function(std::move(name), Function::META, arity, doc, default_options) {}
Review Comment:
```suggestion
: Function(std::move(name), Function::META, arity, std::move(doc), default_options) {}
```
##########
cpp/src/arrow/compute/kernels/scalar_arithmetic.cc:
##########
@@ -1949,7 +1949,7 @@ std::shared_ptr<ScalarFunction> MakeArithmeticFunction(std::string name,
// only on non-null output.
template <typename Op, typename FunctionImpl = ArithmeticFunction>
std::shared_ptr<ScalarFunction> MakeArithmeticFunctionNotNull(std::string name,
- const FunctionDoc* doc) {
+ FunctionDoc doc) {
auto func = std::make_shared<FunctionImpl>(name, Arity::Binary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<FunctionImpl>(name, Arity::Binary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/scalar_arithmetic.cc:
##########
@@ -2075,7 +2075,7 @@ Status ExecRound(KernelContext* ctx, const ExecBatch& batch, Datum* out) {
// kernel dispatch based on RoundMode, only on non-null output.
template <template <typename, RoundMode, typename...> class Op, typename OptionsType>
std::shared_ptr<ScalarFunction> MakeUnaryRoundFunction(std::string name,
- const FunctionDoc* doc) {
+ FunctionDoc doc) {
using State = RoundOptionsWrapper<OptionsType>;
static const OptionsType kDefaultOptions = OptionsType::Defaults();
auto func = std::make_shared<ArithmeticIntegerToFloatingPointFunction>(
Review Comment:
Below this line there is another spot for std::move(doc) but GitHub won't let me suggest it.
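For reference, that change would look roughly like the sketch below (hypothetical: the actual call sits just past the quoted hunk, so the trailing arguments are assumed from the surrounding code, e.g. `&kDefaultOptions` declared above):
```cpp
// Sketch: pass the doc by value and move it into the function object
// instead of copying it (assumed continuation of the quoted call).
auto func = std::make_shared<ArithmeticIntegerToFloatingPointFunction>(
    name, Arity::Unary(), std::move(doc), &kDefaultOptions);
```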
##########
cpp/src/arrow/compute/kernels/scalar_arithmetic.cc:
##########
@@ -2160,7 +2160,7 @@ std::shared_ptr<ScalarFunction> MakeUnaryArithmeticFunctionFloatingPoint(
template <typename Op>
std::shared_ptr<ScalarFunction> MakeUnaryArithmeticFunctionFloatingPointNotNull(
- std::string name, const FunctionDoc* doc) {
+ std::string name, FunctionDoc doc) {
auto func =
std::make_shared<ArithmeticFloatingPointFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
std::make_shared<ArithmeticFloatingPointFunction>(name, Arity::Unary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/scalar_arithmetic.cc:
##########
@@ -2186,7 +2186,7 @@ std::shared_ptr<ScalarFunction> MakeArithmeticFunctionFloatingPoint(
template <typename Op>
std::shared_ptr<ScalarFunction> MakeArithmeticFunctionFloatingPointNotNull(
- std::string name, const FunctionDoc* doc) {
+ std::string name, FunctionDoc doc) {
auto func =
std::make_shared<ArithmeticFloatingPointFunction>(name, Arity::Binary(), doc);
Review Comment:
```suggestion
std::make_shared<ArithmeticFloatingPointFunction>(name, Arity::Binary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/scalar_string_utf8.cc:
##########
@@ -39,7 +39,7 @@ namespace {
template <template <typename> class Transformer>
void MakeUnaryStringUTF8TransformKernel(std::string name, FunctionRegistry* registry,
- const FunctionDoc* doc) {
+ FunctionDoc doc) {
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/vector_selection.cc:
##########
@@ -2329,7 +2329,7 @@ struct SelectionKernelDescr {
ArrayKernelExec exec;
};
-void RegisterSelectionFunction(const std::string& name, const FunctionDoc* doc,
+void RegisterSelectionFunction(const std::string& name, FunctionDoc doc,
VectorKernel base_kernel, InputType selection_type,
const std::vector<SelectionKernelDescr>& descrs,
const FunctionOptions* default_options,
Review Comment:
std::move(doc) below this
##########
cpp/src/arrow/python/udf.cc:
##########
@@ -0,0 +1,131 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "arrow/python/udf.h"
+#include "arrow/compute/function.h"
+#include "arrow/python/common.h"
+
+namespace arrow {
+
+namespace py {
+
+namespace {
+Status CheckOutputType(const DataType& expected, const DataType& actual) {
+ if (!expected.Equals(actual)) {
+ return Status::TypeError("Expected output type, ", expected.ToString(),
+ ", but function returned type ",
actual.ToString());
+ }
+ return Status::OK();
+}
+
+struct PythonUdf {
+ ScalarUdfWrapperCallback cb;
+ std::shared_ptr<OwnedRefNoGIL> function;
+ compute::OutputType output_type;
+
+ // function needs to be destroyed at process exit
+ // and Python may no longer be initialized.
+ ~PythonUdf() {
+ if (_Py_IsFinalizing()) {
+ function->detach();
+ }
+ }
+
+ Status operator()(compute::KernelContext* ctx, const compute::ExecBatch& batch,
+ Datum* out) {
+ return SafeCallIntoPython([=]() -> Status { return Execute(ctx, batch, out); });
Review Comment:
```suggestion
return SafeCallIntoPython([&]() -> Status { return Execute(ctx, batch, out); });
```
Minor nit but this will force a copy of `batch` if you have `=` and we can avoid it with `&`.
[Example](https://godbolt.org/#z:OYLghAFBqd5QCxAYwPYBMCmBRdBLAF1QCcAaPECAMzwBtMA7AQwFtMQByARg9KtQYEAysib0QXACx8BBAKoBnTAAUAHpwAMvAFYTStJg1DIApACYAQuYukl9ZATwDKjdAGFUtAK4sGIAOykrgAyeAyYAHI%2BAEaYxCAAzABspAAOqAqETgwe3r4BaRlZAqHhUSyx8cm2mPaOAkIETMQEuT5%2BgXaYDtmNzQSlkTFxiSkKTS1t%2BZ0TA2FDFSPJAJS2qF7EyOwc5glhyN5YANQmCW5sLCQAnqfYJhoAgrv7h5gnZ07jxJist/dPjwOTAUCiOABViExkABrTDobCCQhXE7%2BKyPI4Yo6pLzRWh4ZAgf7/THgyEwuEIxwEK4QZYotEPEkk8boEAodYEd5uU5uE5mMwQqGw%2BGI6nmMxcnlHFls1y0U4MkkmfwAEWJmMF5JFVJpaAY41JQopopuZiSR1QBAQcTpysVTIxMvZXk5PKl4o8qVNErdZ2lBFZIDlCvVGOVavRGrJwspSIgmpjJvMSWTFqtNvpoaZTrQLslfvFAFlUAA3TDi/O8p3BhL2sOqrMJ4061OoVJxJhEYinFUQPUGpvapGt9PEW2orPMgNs3Ous7u/memnAzLABjLCu%2BqvToMMdDy2uTzHfAgbBhHABUVrwChDkfrEcZUaNQ7FZot7chXZ78ejzeHZojtaY6Zve2Y7rOlZ8mYxZlhAK54GuG78lB1Z7gedZMieZ6Xtet6HmB4ZEo8REEQCDxhJyLBMGEtKgU%2BGKDrG1JHIwLZkVOgaQVu0FPChPFofud4MUcTAuqgRwGBU6BMFw7wqicACsVgJGqim9uOmEkthxDnmxw4cZipGYTmHJQeKVj8fOfqCRhWZiUQkmsNEMk%2BqpSqWSm6l0XaR4YjpelJoZD7CZxM5mTxHqbtZ26BjWmEBUcGjCeGHCrLQnCKbwfgcFopCoJw3KWNY0rrJsby7DwpAEJoaWrNCiQJAAdAkrVte17UpBlHCSNltX5ZwvAKCAGjVbVqxwLASBoCwqR0HE5CUDNc30PEwBcFwCR8HQBBxMNEDRP10RhM0Vz9TNbCCAA8gwtBnblvBYNRRjiA9pD4N8PRlsNb2YKo3QutseWUbU/V4tEkLEFcHhYP1BDEHgLCcDwqxUAYwAKAAangmAAO5Xe2OVVfwggiGI7BSDIgiKCo6hvboXD6IYxjWNY%2Bh4NEw2QKsbb1PqnAALRXQkRwCyyPamMVlgbUNtTdHzLh7lMfiMyE8zlJUejpJkfPK1rRR84MGsjIzXQ9A0sx66bcvmwwfQtEbwzxKblueO0ejjP0juLM7qwKGVWx6PDmDbCj%2BiZX1b0FRwqgABxJALSSSEcwDIMgRwbS1RwQEVlk2EcuCECQfIJIzRweLN83ECXXDLLwNUPcs9WNS1HVt61XWcL1pA5Xl0dDSNY2N6Qk2ICAHLYgQi0QHCRcI6yjMk8IojiJTS802o/UM6QuOQqkyNpeHHBZT3/XR1dLqTxaVBHHHCdJynacZ1wWc56glerTXddD1oTekA1rVW7tw6kfbuvdeD91sIPBuv90qcDMJHPug0f51VIGWYgmRnCSCAA%3D%3D%3D)
The rules for lambda captures are a bit odd :)
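A minimal self-contained illustration of the same point (nothing here is from the PR; `Big` is a stand-in for a type like `ExecBatch`):
```cpp
#include <cstdio>

struct Big {
  Big() = default;
  Big(const Big&) { std::puts("copied!"); }  // loud copy constructor
};

void by_value(const Big& b) {
  auto f = [=] { (void)&b; };  // [=] copies the referred-to object into the lambda
  f();
}

void by_reference(const Big& b) {
  auto f = [&] { (void)&b; };  // [&] captures by reference; no copy is made
  f();
}

int main() {
  Big big;
  by_value(big);      // prints "copied!" when the lambda is constructed
  by_reference(big);  // prints nothing
  return 0;
}
```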
##########
python/pyarrow/_compute.pyx:
##########
@@ -2275,3 +2279,207 @@ cdef CExpression _bind(Expression filter, Schema schema) except *:
return GetResultValue(filter.unwrap().Bind(
deref(pyarrow_unwrap_schema(schema).get())))
+
+
+cdef class ScalarUdfContext:
+ """
+ Per-invocation function context/state.
+
+ This object will always be the first argument to a user-defined
+ function. It should not be used outside of a call to the function.
+ """
+
+ def __init__(self):
+ raise TypeError("Do not call {}'s constructor directly"
+ .format(self.__class__.__name__))
+
+ cdef void init(self, const CScalarUdfContext &c_context):
+ self.c_context = c_context
+
+ @property
+ def batch_length(self):
+ """
+ The common length of all input arguments (int).
+
+ In the case that all arguments are scalars, this value
+ is used to pass the "actual length" of the arguments,
+ e.g. because the scalar values are encoding a column
+ with a constant value.
+ """
+ return self.c_context.batch_length
+
+ @property
+ def memory_pool(self):
+ """
+ A memory pool for allocations (:class:`MemoryPool`).
+ """
+ return box_memory_pool(self.c_context.pool)
+
+
+cdef inline CFunctionDoc _make_function_doc(dict func_doc) except *:
+ """
+ Helper function to generate the FunctionDoc
+ This function accepts a dictionary and expect the
+ summary(str), description(str) and arg_names(List[str]) keys.
+ """
+ cdef:
+ CFunctionDoc f_doc
+ vector[c_string] c_arg_names
+
+ f_doc.summary = tobytes(func_doc["summary"])
+ f_doc.description = tobytes(func_doc["description"])
+ for arg_name in func_doc["arg_names"]:
+ c_arg_names.push_back(tobytes(arg_name))
+ f_doc.arg_names = c_arg_names
+ # UDFOptions integration:
+ # TODO: https://issues.apache.org/jira/browse/ARROW-16041
+ f_doc.options_class = b""
+ f_doc.options_required = False
+ return f_doc
+
+
+cdef object box_scalar_udf_context(const CScalarUdfContext& c_context):
+ cdef ScalarUdfContext context = ScalarUdfContext.__new__(ScalarUdfContext)
+ context.init(c_context)
+ return context
+
+
+cdef _scalar_udf_callback(user_function, const CScalarUdfContext& c_context, inputs):
+ """
+ Helper callback function used to wrap the ScalarUdfContext from Python to C++
+ execution.
+ """
+ context = box_scalar_udf_context(c_context)
+ return user_function(context, *inputs)
+
+
+def _get_scalar_udf_context(memory_pool, batch_length):
+ cdef CScalarUdfContext c_context
+ c_context.pool = maybe_unbox_memory_pool(memory_pool)
+ c_context.batch_length = batch_length
+ context = box_scalar_udf_context(c_context)
+ return context
+
+
+def register_scalar_function(func, function_name, function_doc, in_types,
+ out_type):
+ """
+ Register a user-defined scalar function.
+
+ A scalar function is a function that executes elementwise
+ operations on arrays or scalars, i.e. a scalar function must
+ be computed row-by-row with no state where each output row
+ is computed by only from its corresponding input row.
+ In other words, all argument arrays have the same length,
+ and the output array is of the same length as the arguments.
+ Scalar functions are the only functions allowed in query engine
+ expressions.
+
+ Parameters
+ ----------
+ func : callable
+ A callable implementing the user-defined function.
+ The first argument is the context argument of type
+ ScalarUdfContext.
+ Then, it must take arguments equal to the number of
+ in_types defined. It must return an Array or Scalar
+ matching the out_type. It must return a Scalar if
+ all arguments are scalar, else it must return an Array.
+
+ To define a varargs function, pass a callable that takes
+ varargs. The last in_type will be the type of all varargs
+ arguments.
+ function_name : str
+ Name of the function. This name must be globally unique.
+ function_doc : dict
+ A dictionary object with keys "summary" (str),
+ and "description" (str).
+ in_types : Dict[str, DataType]
+ A dictionary mapping function argument names to
+ their respective DataType.
+ The argument names will be used to generate
+ documentation for the function. The number of
+ arguments specified here determines the function
+ arity.
+ out_type : DataType
+ Output type of the function.
+
+ Examples
+ --------
+
+ >>> import pyarrow.compute as pc
+ >>>
+ >>> func_doc = {}
+ >>> func_doc["summary"] = "simple udf"
+ >>> func_doc["description"] = "add a constant to a scalar"
+ >>>
+ >>> def add_constant(ctx, array):
+ ... return pc.add(array, 1)
Review Comment:
```suggestion
... return pc.add(array, 1, memory_pool=ctx.memory_pool)
```
##########
cpp/src/arrow/compute/kernels/scalar_arithmetic.cc:
##########
@@ -1961,7 +1961,7 @@ std::shared_ptr<ScalarFunction> MakeArithmeticFunctionNotNull(std::string name,
template <typename Op>
std::shared_ptr<ScalarFunction> MakeUnaryArithmeticFunction(std::string name,
- const FunctionDoc* doc) {
+ FunctionDoc doc) {
auto func = std::make_shared<ArithmeticFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ArithmeticFunction>(name, Arity::Unary(), std::move(doc));
```
##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,498 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+ import pyarrow.dataset as ds
+except ImportError:
+ ds = None
+
+
+def mock_udf_context(batch_length=10):
+ from pyarrow._compute import _get_scalar_udf_context
+ return _get_scalar_udf_context(pa.default_memory_pool(), batch_length)
+
+
[email protected](scope="session")
+def unary_func_fixture():
+ def unary_function(ctx, scalar1):
+ return pc.call_function("add", [scalar1, 1])
+ func_name = "y=x+k"
+ unary_doc = {"summary": "add function",
+ "description": "test add function"}
+ pc.register_scalar_function(unary_function,
+ func_name,
+ unary_doc,
+ {"array": pa.int64()},
+ pa.int64())
+ return unary_function, func_name
+
+
[email protected](scope="session")
+def binary_func_fixture():
+ def binary_function(ctx, m, x):
+ return pc.call_function("multiply", [m, x])
+ func_name = "y=mx"
+ binary_doc = {"summary": "y=mx",
+ "description": "find y from y = mx"}
+ pc.register_scalar_function(binary_function,
+ func_name,
+ binary_doc,
+ {"m": pa.int64(),
+ "x": pa.int64(),
+ },
+ pa.int64())
+ return binary_function, func_name
+
+
[email protected](scope="session")
+def ternary_func_fixture():
+ def ternary_function(ctx, m, x, c):
+ mx = pc.call_function("multiply", [m, x])
+ return pc.call_function("add", [mx, c])
+ ternary_doc = {"summary": "y=mx+c",
+ "description": "find y from y = mx + c"}
+ func_name = "y=mx+c"
+ pc.register_scalar_function(ternary_function,
+ func_name,
+ ternary_doc,
+ {
+ "array1": pa.int64(),
+ "array2": pa.int64(),
+ "array3": pa.int64(),
+ },
+ pa.int64())
+ return ternary_function, func_name
+
+
[email protected](scope="session")
+def varargs_func_fixture():
+ def varargs_function(ctx, *values):
+ base_val = values[:2]
+ res = pc.call_function("add", base_val)
+ for other_val in values[2:]:
+ res = pc.call_function("add", [res, other_val])
+ return res
+ func_name = "z=ax+by+c"
+ varargs_doc = {"summary": "z=ax+by+c",
+ "description": "find z from z = ax + by + c"
+ }
+ pc.register_scalar_function(varargs_function,
+ func_name,
+ varargs_doc,
+ {
+ "array1": pa.int64(),
+ "array2": pa.int64(),
+ "array3": pa.int64(),
+ "array4": pa.int64(),
+ "array5": pa.int64(),
+ },
+ pa.int64())
+ return varargs_function, func_name
+
+
[email protected](scope="session")
+def random_with_udf_ctx_func_fixture():
+ def random_with_udf_ctx(context, one, two):
+ return pc.add(one, two, memory_pool=context.memory_pool)
+
+ in_types = {"one": pa.int64(),
+ "two": pa.int64(),
+ }
+ func_doc = {
+ "summary": "test udf context",
+ "description": "udf context test"
+ }
+ func_name = "test_udf_context"
+ pc.register_scalar_function(random_with_udf_ctx,
+ func_name, func_doc,
+ in_types,
+ pa.int64())
+ return random_with_udf_ctx, func_name
+
+
[email protected](scope="session")
+def output_check_func_fixture():
+ # The objective of this fixture is to evaluate,
+ # how the UDF interface respond to unexpected
+ # output types. The types chosen at the test
+ # end are either of different Arrow data type
+ # or non-Arrow type.
+ def output_check(ctx, array):
+ ar = pc.call_function("add", [array, 1])
+ ar = ar.cast(pa.int32())
+ return ar
+ func_name = "test_output_value"
+ in_types = {"array": pa.int64()}
+ out_type = pa.int64()
+ doc = {
+ "summary": "add function scalar",
+ "description": "add function"
+ }
+ pc.register_scalar_function(output_check, func_name, doc,
+ in_types, out_type)
+ return output_check, func_name
+
+
[email protected](scope="session")
+def nullary_check_func_fixture():
+ # this needs to return array values
+ def nullary_check(context):
+ return pa.array([42] * context.batch_length, type=pa.int64(),
+ memory_pool=context.memory_pool)
+
+ func_doc = {
+ "summary": "random function",
+ "description": "generates a random value"
+ }
+ func_name = "test_random_func"
+ pc.register_scalar_function(nullary_check,
+ func_name,
+ func_doc,
+ {},
+ pa.int64())
+
+ return nullary_check, func_name
+
+
[email protected](scope="session")
+def output_python_type_func_fixture():
+ # This fixture helps to check the response
+ # when the function return value is not an Arrow
+ # defined data type. Instead here the returned value
+ # is of type int in Python.
+ def const_return(ctx, scalar):
+ return 42
+
+ func_name = "test_output_type"
+ in_types = {"array": pa.int64()}
+ out_type = pa.int64()
+ doc = {
+ "summary": "add function scalar",
+ "description": "add function"
+ }
+ pc.register_scalar_function(const_return, func_name, doc,
+ in_types, out_type)
+ return const_return, func_name
+
+
[email protected](scope="session")
+def varargs_check_func_fixture():
+ def varargs_check(ctx, *values):
+ base_val = values[:2]
+ res = pc.call_function("add", base_val)
+ for other_val in values[2:]:
+ res = pc.call_function("add", [res, other_val])
+ return res
+ func_name = "test_varargs_function"
+ in_types = {"array1": pa.int64(),
+ "array2": pa.int64(),
+ }
+ doc = {"summary": "n add function",
+ "description": "add N number of arrays"
+ }
+ pc.register_scalar_function(varargs_check, func_name, doc,
+ in_types, pa.int64())
+
+ return varargs_check, func_name
+
+
[email protected](scope="session")
+def raise_func_fixture():
+ def raise_func(ctx):
+ raise ValueError("Test function with raise")
+ func_name = "test_raise"
+ doc = {
+ "summary": "test function with raise",
+ "description": "function with a raise"
+ }
+ pc.register_scalar_function(raise_func, func_name, doc,
+ {}, pa.int64())
+ return raise_func, func_name
+
+
+def check_scalar_function(func_fixture,
+ inputs,
+ run_in_dataset=True,
+ batch_length=None):
+ function, name = func_fixture
+ if batch_length is None:
+ for input in inputs:
+ try:
+ batch_length = len(inputs)
+ except TypeError:
+ pass
+ expected_output = function(mock_udf_context(batch_length), *inputs)
+ func = pc.get_function(name)
+ assert func.name == name
+
+ result = pc.call_function(name, inputs)
+ assert result == expected_output
+ if run_in_dataset:
+ field_names = [f'field{index}' for index, in_arr in inputs]
+ table = pa.Table.from_arrays(inputs, field_names)
+ dataset = ds.dataset(table)
+ func_args = [ds.field(field_name) for field_name in field_names]
+ result_table = dataset.to_table(
+ columns={'result': ds.field('')._call(name, func_args)})
+ assert result_table.column(0).chunks[0] == expected_output
+
+
+def test_scalar_udf_array_unary(unary_func_fixture):
+ check_scalar_function(unary_func_fixture,
+ [
+ pa.array([10, 20], pa.int64())
+ ]
+ )
+
+
+def test_scalar_udf_array_binary(binary_func_fixture):
+ check_scalar_function(binary_func_fixture,
+ [
+ pa.array([10, 20], pa.int64()),
+ pa.array([2, 4], pa.int64())
+ ]
+ )
+
+
+def test_scalar_udf_array_ternary(ternary_func_fixture):
+ check_scalar_function(ternary_func_fixture,
+ [
+ pa.array([10, 20], pa.int64()),
+ pa.array([2, 4], pa.int64()),
+ pa.array([5, 10], pa.int64())
+ ]
+ )
+
+
+def test_scalar_udf_array_varargs(varargs_func_fixture):
+ check_scalar_function(varargs_func_fixture,
+ [
+ pa.array([2, 3], pa.int64()),
+ pa.array([10, 20], pa.int64()),
+ pa.array([3, 7], pa.int64()),
+ pa.array([20, 30], pa.int64()),
+ pa.array([5, 10], pa.int64())
+ ]
+ )
+
+
+def test_registration_errors():
+ # validate function name
+ doc = {
+ "summary": "test udf input",
+ "description": "parameters are validated"
+ }
+ in_types = {"scalar": pa.int64()}
+ out_type = pa.int64()
+
+ def test_reg_function(context):
+ return pa.array([10])
+
+ with pytest.raises(TypeError):
+ pc.register_scalar_function(test_reg_function,
+ None, doc, in_types,
+ out_type)
+
+ # validate function
+ with pytest.raises(TypeError, match="func must be a callable"):
+ pc.register_scalar_function(None, "test_none_function", doc, in_types,
+ out_type)
+
+ # validate output type
+ expected_expr = "DataType expected, got <class 'NoneType'>"
+ with pytest.raises(TypeError, match=expected_expr):
+ pc.register_scalar_function(test_reg_function,
+ "test_output_function", doc, in_types,
+ None)
+
+ # validate input type
+ expected_expr = "in_types must be a dictionary of DataType"
+ with pytest.raises(TypeError, match=expected_expr):
+ pc.register_scalar_function(test_reg_function,
+ "test_input_function", doc, None,
+ out_type)
+
+ # register an already registered function
+ # first registration
+ pc.register_scalar_function(test_reg_function,
+ "test_reg_function", doc, {},
+ out_type)
+ # second registration
+ expected_expr = "Already have a function registered with name:" \
+ + " test_reg_function"
+ with pytest.raises(pa.lib.ArrowKeyError, match=expected_expr):
+ pc.register_scalar_function(test_reg_function,
+ "test_reg_function", doc, {},
+ out_type)
+
+
+def test_varargs_function_validation(varargs_check_func_fixture):
+ _, func_name = varargs_check_func_fixture
+ func = pc.get_function(func_name)
+
+ assert func.name == func_name
+
+ error_msg = "VarArgs function 'test_varargs_function'" \
+ + " needs at least 2 arguments"
+
+ with pytest.raises(pa.lib.ArrowInvalid, match=error_msg):
+ pc.call_function(func_name, [pa.array([1, 10]),
+ ])
+
+
+def test_function_doc_validation():
+ # validate arity
+ in_types = {"scalar": pa.int64()}
+ out_type = pa.int64()
+
+ # doc with no summary
+ func_doc = {
+ "description": "desc"
+ }
+
+ def add_const(ctx, scalar):
+ return pc.call_function("add", [scalar, 1])
+
+ expected_expr = "Function doc must contain a summary"
+
+ with pytest.raises(ValueError, match=expected_expr):
+ pc.register_scalar_function(add_const, "test_no_summary",
+ func_doc, in_types,
+ out_type)
+
+ # doc with no decription
+ func_doc = {
+ "summary": "test summary"
+ }
+
+ expected_expr = "Function doc must contain a description"
+
+ with pytest.raises(ValueError, match=expected_expr):
+ pc.register_scalar_function(add_const, "test_no_desc",
+ func_doc, in_types,
+ out_type)
+
+ # doc with empty dictionary
+ func_doc = {}
+ expected_expr = "Function doc must contain a summary"
+ with pytest.raises(ValueError, match=expected_expr):
+ pc.register_scalar_function(add_const,
+ "test_empty_dictionary",
+ func_doc, in_types,
+ out_type)
+
+
+def test_nullary_functions(nullary_check_func_fixture):
+ check_scalar_function(nullary_check_func_fixture, [], False, 1)
Review Comment:
Let's either remove this test (and the corresponding fixture) or explain that we pass `run_in_dataset=False` because of ARROW-16290 and ARROW-16286.
##########
cpp/src/arrow/compute/kernels/scalar_arithmetic.cc:
##########
@@ -2124,7 +2124,7 @@ std::shared_ptr<ScalarFunction> MakeUnarySignedArithmeticFunctionNotNull(
template <typename Op>
std::shared_ptr<ScalarFunction> MakeBitWiseFunctionNotNull(std::string name,
- const FunctionDoc* doc) {
+ FunctionDoc doc) {
auto func = std::make_shared<ArithmeticFunction>(name, Arity::Binary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ArithmeticFunction>(name, Arity::Binary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/scalar_arithmetic.cc:
##########
@@ -1975,7 +1975,7 @@ std::shared_ptr<ScalarFunction> MakeUnaryArithmeticFunction(std::string name,
// output type for integral inputs.
template <typename Op, typename IntOutType>
std::shared_ptr<ScalarFunction> MakeUnaryArithmeticFunctionWithFixedIntOutType(
- std::string name, const FunctionDoc* doc) {
+ std::string name, FunctionDoc doc) {
auto int_out_ty = TypeTraits<IntOutType>::type_singleton();
auto func = std::make_shared<ArithmeticFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ArithmeticFunction>(name, Arity::Unary(), std::move(doc));
```
##########
cpp/src/arrow/compute/function.h:
##########
@@ -365,7 +365,7 @@ class ARROW_EXPORT HashAggregateFunction
public:
using KernelType = HashAggregateKernel;
- HashAggregateFunction(std::string name, const Arity& arity, const FunctionDoc* doc,
+ HashAggregateFunction(std::string name, const Arity& arity, FunctionDoc doc,
const FunctionOptions* default_options = NULLPTR)
: detail::FunctionImpl<HashAggregateKernel>(
std::move(name), Function::HASH_AGGREGATE, arity, doc, default_options) {}
Review Comment:
```suggestion
std::move(name), Function::HASH_AGGREGATE, arity, std::move(doc), default_options) {}
```
##########
cpp/src/arrow/compute/function.h:
##########
@@ -350,7 +350,7 @@ class ARROW_EXPORT ScalarAggregateFunction
public:
using KernelType = ScalarAggregateKernel;
- ScalarAggregateFunction(std::string name, const Arity& arity, const FunctionDoc* doc,
+ ScalarAggregateFunction(std::string name, const Arity& arity, FunctionDoc doc,
const FunctionOptions* default_options = NULLPTR)
: detail::FunctionImpl<ScalarAggregateKernel>(
std::move(name), Function::SCALAR_AGGREGATE, arity, doc, default_options) {}
Review Comment:
```suggestion
std::move(name), Function::SCALAR_AGGREGATE, arity, std::move(doc), default_options) {}
```
##########
cpp/src/arrow/compute/kernels/scalar_arithmetic.cc:
##########
@@ -1996,8 +1996,8 @@ std::shared_ptr<ScalarFunction> MakeUnaryArithmeticFunctionWithFixedIntOutType(
// Like MakeUnaryArithmeticFunction, but for arithmetic ops that need to run
// only on non-null output.
template <typename Op>
-std::shared_ptr<ScalarFunction> MakeUnaryArithmeticFunctionNotNull(
- std::string name, const FunctionDoc* doc) {
+std::shared_ptr<ScalarFunction> MakeUnaryArithmeticFunctionNotNull(std::string name,
+ FunctionDoc doc) {
auto func = std::make_shared<ArithmeticFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ArithmeticFunction>(name, Arity::Unary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/scalar_arithmetic.cc:
##########
@@ -2172,8 +2172,8 @@ std::shared_ptr<ScalarFunction> MakeUnaryArithmeticFunctionFloatingPointNotNull(
}
template <typename Op>
-std::shared_ptr<ScalarFunction> MakeArithmeticFunctionFloatingPoint(
- std::string name, const FunctionDoc* doc) {
+std::shared_ptr<ScalarFunction> MakeArithmeticFunctionFloatingPoint(std::string name,
+ FunctionDoc doc) {
auto func =
std::make_shared<ArithmeticFloatingPointFunction>(name, Arity::Binary(), doc);
Review Comment:
```suggestion
std::make_shared<ArithmeticFloatingPointFunction>(name, Arity::Binary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/scalar_arithmetic.cc:
##########
@@ -2110,7 +2110,7 @@ std::shared_ptr<ScalarFunction> MakeUnaryRoundFunction(std::string name,
// only on non-null output.
template <typename Op>
std::shared_ptr<ScalarFunction> MakeUnarySignedArithmeticFunctionNotNull(
- std::string name, const FunctionDoc* doc) {
+ std::string name, FunctionDoc doc) {
auto func = std::make_shared<ArithmeticFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ArithmeticFunction>(name, Arity::Unary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/scalar_arithmetic.cc:
##########
@@ -2136,7 +2136,7 @@ std::shared_ptr<ScalarFunction> MakeBitWiseFunctionNotNull(std::string name,
template <typename Op>
std::shared_ptr<ScalarFunction> MakeShiftFunctionNotNull(std::string name,
- const FunctionDoc* doc) {
+ FunctionDoc doc) {
auto func = std::make_shared<ArithmeticFunction>(name, Arity::Binary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ArithmeticFunction>(name, Arity::Binary(), std::move(doc));
```
##########
cpp/src/arrow/compute/function.h:
##########
@@ -329,7 +329,7 @@ class ARROW_EXPORT VectorFunction : public detail::FunctionImpl<VectorKernel> {
public:
using KernelType = VectorKernel;
- VectorFunction(std::string name, const Arity& arity, const FunctionDoc* doc,
+ VectorFunction(std::string name, const Arity& arity, FunctionDoc doc,
const FunctionOptions* default_options = NULLPTR)
: detail::FunctionImpl<VectorKernel>(std::move(name), Function::VECTOR, arity, doc,
Review Comment:
```suggestion
: detail::FunctionImpl<VectorKernel>(std::move(name), Function::VECTOR, arity, std::move(doc),
```
##########
cpp/src/arrow/compute/kernels/scalar_string_internal.h:
##########
@@ -185,7 +185,7 @@ void MakeUnaryStringBatchKernel(
template <template <typename> class ExecFunctor>
void MakeUnaryStringBatchKernelWithState(
- std::string name, FunctionRegistry* registry, const FunctionDoc* doc,
+ std::string name, FunctionRegistry* registry, FunctionDoc doc,
MemAllocation::type mem_allocation = MemAllocation::PREALLOCATE) {
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/scalar_arithmetic.cc:
##########
@@ -2148,7 +2148,7 @@ std::shared_ptr<ScalarFunction> MakeShiftFunctionNotNull(std::string name,
template <typename Op, typename FunctionImpl = ArithmeticFloatingPointFunction>
std::shared_ptr<ScalarFunction> MakeUnaryArithmeticFunctionFloatingPoint(
- std::string name, const FunctionDoc* doc) {
+ std::string name, FunctionDoc doc) {
auto func = std::make_shared<FunctionImpl>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<FunctionImpl>(name, Arity::Unary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/scalar_string_internal.h:
##########
@@ -271,7 +271,7 @@ struct StringPredicateFunctor {
template <typename Predicate>
void AddUnaryStringPredicate(std::string name, FunctionRegistry* registry,
- const FunctionDoc* doc) {
+ FunctionDoc doc) {
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/scalar_string_internal.h:
##########
@@ -171,7 +171,7 @@ struct StringTransformExecWithState
template <template <typename> class ExecFunctor>
void MakeUnaryStringBatchKernel(
- std::string name, FunctionRegistry* registry, const FunctionDoc* doc,
+ std::string name, FunctionRegistry* registry, FunctionDoc doc,
MemAllocation::type mem_allocation = MemAllocation::PREALLOCATE) {
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/scalar_temporal_unary.cc:
##########
@@ -1474,7 +1474,7 @@ struct UnaryTemporalFactory {
template <typename... WithTypes>
static std::shared_ptr<ScalarFunction> Make(
- std::string name, OutputType out_type, const FunctionDoc* doc,
+ std::string name, OutputType out_type, FunctionDoc doc,
const FunctionOptions* default_options = NULLPTR, KernelInit init = NULLPTR) {
DCHECK_NE(sizeof...(WithTypes), 0);
UnaryTemporalFactory self{
Review Comment:
std::move(doc) below this
##########
cpp/src/arrow/compute/kernels/scalar_validity.cc:
##########
@@ -185,9 +185,8 @@ struct IsNanOperator {
}
};
-void MakeFunction(std::string name, const FunctionDoc* doc,
- std::vector<InputType> in_types, OutputType out_type,
- ArrayKernelExec exec, FunctionRegistry* registry,
+void MakeFunction(std::string name, FunctionDoc doc, std::vector<InputType> in_types,
+ OutputType out_type, ArrayKernelExec exec, FunctionRegistry* registry,
MemAllocation::type mem_allocation, NullHandling::type null_handling,
bool can_write_into_slices,
const FunctionOptions* default_options = NULLPTR,
Review Comment:
std::move(doc) below this
##########
cpp/src/arrow/compute/kernels/scalar_validity.cc:
##########
@@ -223,7 +222,7 @@ Status ConstBoolExec(KernelContext* ctx, const ExecBatch& batch, Datum* out) {
}
std::shared_ptr<ScalarFunction> MakeIsFiniteFunction(std::string name,
- const FunctionDoc* doc) {
+ const FunctionDoc doc) {
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), std::move(doc));
```
##########
cpp/src/arrow/compute/kernels/scalar_temporal_binary.cc:
##########
@@ -326,7 +326,7 @@ struct BinaryTemporalFactory {
template <typename... WithTypes>
static std::shared_ptr<ScalarFunction> Make(
- std::string name, OutputType out_type, const FunctionDoc* doc,
+ std::string name, OutputType out_type, FunctionDoc doc,
const FunctionOptions* default_options = NULLPTR, KernelInit init = NULLPTR) {
DCHECK_NE(sizeof...(WithTypes), 0);
BinaryTemporalFactory self{
Review Comment:
Another place for std::move(doc) below this
##########
cpp/src/arrow/compute/kernels/scalar_temporal_unary.cc:
##########
@@ -1499,7 +1499,7 @@ struct SimpleUnaryTemporalFactory {
template <typename... WithTypes>
static std::shared_ptr<ScalarFunction> Make(
- std::string name, OutputType out_type, const FunctionDoc* doc,
+ std::string name, OutputType out_type, FunctionDoc doc,
const FunctionOptions* default_options = NULLPTR, KernelInit init = NULLPTR) {
DCHECK_NE(sizeof...(WithTypes), 0);
SimpleUnaryTemporalFactory self{
Review Comment:
std::move(doc) below this
##########
cpp/src/arrow/compute/kernels/scalar_validity.cc:
##########
@@ -241,8 +240,7 @@ std::shared_ptr<ScalarFunction> MakeIsFiniteFunction(std::string name,
return func;
}
-std::shared_ptr<ScalarFunction> MakeIsInfFunction(std::string name,
- const FunctionDoc* doc) {
+std::shared_ptr<ScalarFunction> MakeIsInfFunction(std::string name, FunctionDoc doc) {
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), std::move(doc));
```
##########
cpp/src/arrow/python/udf.cc:
##########
@@ -0,0 +1,131 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "arrow/python/udf.h"
+#include "arrow/compute/function.h"
+#include "arrow/python/common.h"
+
+namespace arrow {
+
+namespace py {
+
+namespace {
+Status CheckOutputType(const DataType& expected, const DataType& actual) {
+ if (!expected.Equals(actual)) {
+ return Status::TypeError("Expected output type, ", expected.ToString(),
+ ", but function returned type ",
actual.ToString());
+ }
+ return Status::OK();
+}
+
+struct PythonUdf {
+ ScalarUdfWrapperCallback cb;
+ std::shared_ptr<OwnedRefNoGIL> function;
+ compute::OutputType output_type;
+
+ // function needs to be destroyed at process exit
+ // and Python may no longer be initialized.
+ ~PythonUdf() {
+ if (_Py_IsFinalizing()) {
+ function->detach();
+ }
+ }
+
+ Status operator()(compute::KernelContext* ctx, const compute::ExecBatch& batch,
+ Datum* out) {
+ return SafeCallIntoPython([=]() -> Status { return Execute(ctx, batch, out); });
+ }
+
+ Status Execute(compute::KernelContext* ctx, const compute::ExecBatch& batch,
+ Datum* out) {
+ const auto num_args = batch.values.size();
+ ScalarUdfContext udf_context{ctx->memory_pool(), static_cast<int64_t>(batch.length)};
+ PyObject* arg_tuple = PyTuple_New(num_args);
+ for (size_t arg_id = 0; arg_id < num_args; arg_id++) {
+ switch (batch[arg_id].kind()) {
+ case Datum::SCALAR: {
+ auto c_data = batch[arg_id].scalar();
+ PyObject* data = wrap_scalar(c_data);
+ PyTuple_SetItem(arg_tuple, arg_id, data);
+ break;
+ }
+ case Datum::ARRAY: {
+ auto c_data = batch[arg_id].make_array();
+ PyObject* data = wrap_array(c_data);
+ PyTuple_SetItem(arg_tuple, arg_id, data);
+ break;
+ }
+ default:
+ auto datum = batch[arg_id];
+ return Status::NotImplemented(
+ "User-defined-functions are not supported for the datum kind ",
+ ToString(batch[arg_id].kind()));
+ }
+ }
+ PyObject* result;
+ result = cb(function->obj(), udf_context, arg_tuple);
+ RETURN_NOT_OK(CheckPyError());
+ // unwrapping the output for expected output type
+ if (is_scalar(result)) {
+ ARROW_ASSIGN_OR_RAISE(auto val, unwrap_scalar(result));
+ RETURN_NOT_OK(CheckOutputType(*output_type.type(), *val->type));
+ *out = Datum(val);
+ return Status::OK();
+ } else if (is_array(result)) {
+ ARROW_ASSIGN_OR_RAISE(auto val, unwrap_array(result));
+ RETURN_NOT_OK(CheckOutputType(*output_type.type(), *val->type()));
+ *out = Datum(val);
+ return Status::OK();
+ } else {
+ return Status::TypeError("Unexpected output type: ",
Py_TYPE(result)->tp_name,
+ " (expected Scalar or Array)");
+ }
+ return Status::OK();
+ }
+};
+
+} // namespace
+
+Status RegisterScalarFunction(PyObject* user_function, ScalarUdfWrapperCallback wrapper,
+ const ScalarUdfOptions& options) {
+ if (!PyCallable_Check(user_function)) {
+ return Status::TypeError("Expected a callable Python object.");
+ }
+ auto scalar_func = std::make_shared<compute::ScalarFunction>(
+ options.func_name, options.arity, options.func_doc);
+ Py_INCREF(user_function);
+ std::vector<compute::InputType> input_types;
+ for (auto in_dtype : options.input_types) {
+ compute::InputType in_type(in_dtype);
+ input_types.push_back(in_type);
+ }
Review Comment:
```suggestion
  for (const auto& in_dtype : options.input_types) {
    compute::InputType in_type(in_dtype);
    input_types.push_back(std::move(in_type));
  }
```
Or you can do...
```
  for (const auto& in_dtype : options.input_types) {
    input_types.emplace_back(in_dtype);
  }
```
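Either way the copy of the temporary `InputType` is avoided; `emplace_back` simply constructs it in place inside the vector.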
##########
cpp/src/arrow/compute/kernels/vector_selection.cc:
##########
@@ -2428,7 +2428,7 @@ Status IndicesNonZeroExec(KernelContext* ctx, const ExecBatch& batch, Datum* out
}
std::shared_ptr<VectorFunction> MakeIndicesNonZeroFunction(std::string name,
- const FunctionDoc* doc) {
+ FunctionDoc doc) {
auto func = std::make_shared<VectorFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<VectorFunction>(name, Arity::Unary(), std::move(doc));
```
##########
cpp/src/arrow/python/udf.cc:
##########
@@ -0,0 +1,131 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "arrow/python/udf.h"
+#include "arrow/compute/function.h"
+#include "arrow/python/common.h"
+
+namespace arrow {
+
+namespace py {
+
+namespace {
+Status CheckOutputType(const DataType& expected, const DataType& actual) {
+ if (!expected.Equals(actual)) {
+ return Status::TypeError("Expected output type, ", expected.ToString(),
+ ", but function returned type ",
actual.ToString());
+ }
+ return Status::OK();
+}
+
+struct PythonUdf {
+ ScalarUdfWrapperCallback cb;
+ std::shared_ptr<OwnedRefNoGIL> function;
+ compute::OutputType output_type;
+
+ // function needs to be destroyed at process exit
+ // and Python may no longer be initialized.
+ ~PythonUdf() {
+ if (_Py_IsFinalizing()) {
+ function->detach();
+ }
+ }
+
+ Status operator()(compute::KernelContext* ctx, const compute::ExecBatch& batch,
+ Datum* out) {
+ return SafeCallIntoPython([=]() -> Status { return Execute(ctx, batch, out); });
+ }
+
+ Status Execute(compute::KernelContext* ctx, const compute::ExecBatch& batch,
+ Datum* out) {
+ const auto num_args = batch.values.size();
+ ScalarUdfContext udf_context{ctx->memory_pool(), static_cast<int64_t>(batch.length)};
Review Comment:
```suggestion
ScalarUdfContext udf_context{ctx->memory_pool(), batch.length};
```
##########
cpp/src/arrow/python/udf.cc:
##########
@@ -0,0 +1,131 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "arrow/python/udf.h"
+#include "arrow/compute/function.h"
+#include "arrow/python/common.h"
+
+namespace arrow {
+
+namespace py {
+
+namespace {
+Status CheckOutputType(const DataType& expected, const DataType& actual) {
+ if (!expected.Equals(actual)) {
+ return Status::TypeError("Expected output type, ", expected.ToString(),
+ ", but function returned type ",
actual.ToString());
+ }
+ return Status::OK();
+}
+
+struct PythonUdf {
+ ScalarUdfWrapperCallback cb;
+ std::shared_ptr<OwnedRefNoGIL> function;
+ compute::OutputType output_type;
+
+ // function needs to be destroyed at process exit
+ // and Python may no longer be initialized.
+ ~PythonUdf() {
+ if (_Py_IsFinalizing()) {
+ function->detach();
+ }
+ }
+
+ Status operator()(compute::KernelContext* ctx, const compute::ExecBatch& batch,
+ Datum* out) {
+ return SafeCallIntoPython([=]() -> Status { return Execute(ctx, batch, out); });
+ }
+
+ Status Execute(compute::KernelContext* ctx, const compute::ExecBatch& batch,
+ Datum* out) {
+ const auto num_args = batch.values.size();
+ ScalarUdfContext udf_context{ctx->memory_pool(), static_cast<int64_t>(batch.length)};
+ PyObject* arg_tuple = PyTuple_New(num_args);
+ for (size_t arg_id = 0; arg_id < num_args; arg_id++) {
+ switch (batch[arg_id].kind()) {
+ case Datum::SCALAR: {
+ auto c_data = batch[arg_id].scalar();
+ PyObject* data = wrap_scalar(c_data);
+ PyTuple_SetItem(arg_tuple, arg_id, data);
+ break;
+ }
+ case Datum::ARRAY: {
+ auto c_data = batch[arg_id].make_array();
+ PyObject* data = wrap_array(c_data);
+ PyTuple_SetItem(arg_tuple, arg_id, data);
+ break;
+ }
+ default:
+ auto datum = batch[arg_id];
+ return Status::NotImplemented(
+ "User-defined-functions are not supported for the datum kind ",
+ ToString(batch[arg_id].kind()));
+ }
+ }
+ PyObject* result;
+ result = cb(function->obj(), udf_context, arg_tuple);
+ RETURN_NOT_OK(CheckPyError());
+ // unwrapping the output for expected output type
+ if (is_scalar(result)) {
+ ARROW_ASSIGN_OR_RAISE(auto val, unwrap_scalar(result));
+ RETURN_NOT_OK(CheckOutputType(*output_type.type(), *val->type));
+ *out = Datum(val);
+ return Status::OK();
+ } else if (is_array(result)) {
+ ARROW_ASSIGN_OR_RAISE(auto val, unwrap_array(result));
+ RETURN_NOT_OK(CheckOutputType(*output_type.type(), *val->type()));
+ *out = Datum(val);
+ return Status::OK();
+ } else {
+ return Status::TypeError("Unexpected output type: ",
Py_TYPE(result)->tp_name,
+ " (expected Scalar or Array)");
+ }
+ return Status::OK();
+ }
+};
+
+} // namespace
+
+Status RegisterScalarFunction(PyObject* user_function, ScalarUdfWrapperCallback wrapper,
+ const ScalarUdfOptions& options) {
+ if (!PyCallable_Check(user_function)) {
+ return Status::TypeError("Expected a callable Python object.");
+ }
+ auto scalar_func = std::make_shared<compute::ScalarFunction>(
+ options.func_name, options.arity, options.func_doc);
+ Py_INCREF(user_function);
+ std::vector<compute::InputType> input_types;
+ for (auto in_dtype : options.input_types) {
+ compute::InputType in_type(in_dtype);
+ input_types.push_back(in_type);
+ }
+ compute::OutputType output_type(options.output_type);
+ PythonUdf exec{wrapper, std::make_shared<OwnedRefNoGIL>(user_function), output_type};
+ compute::ScalarKernel kernel(
+ compute::KernelSignature::Make(input_types, output_type, options.arity.is_varargs),
Review Comment:
```suggestion
compute::KernelSignature::Make(std::move(input_types), std::move(output_type), options.arity.is_varargs),
```
##########
python/pyarrow/_compute.pyx:
##########
@@ -2275,3 +2279,207 @@ cdef CExpression _bind(Expression filter, Schema schema) except *:
return GetResultValue(filter.unwrap().Bind(
deref(pyarrow_unwrap_schema(schema).get())))
+
+
+cdef class ScalarUdfContext:
+ """
+ Per-invocation function context/state.
+
+ This object will always be the first argument to a user-defined
+ function. It should not be used outside of a call to the function.
+ """
+
+ def __init__(self):
+ raise TypeError("Do not call {}'s constructor directly"
+ .format(self.__class__.__name__))
+
+ cdef void init(self, const CScalarUdfContext &c_context):
+ self.c_context = c_context
+
+ @property
+ def batch_length(self):
+ """
+ The common length of all input arguments (int).
+
+ In the case that all arguments are scalars, this value
+ is used to pass the "actual length" of the arguments,
+ e.g. because the scalar values are encoding a column
+ with a constant value.
+ """
+ return self.c_context.batch_length
+
+ @property
+ def memory_pool(self):
+ """
+ A memory pool for allocations (:class:`MemoryPool`).
+ """
Review Comment:
```suggestion
This is the memory pool supplied by the user when they invoked
the function and it should be used in any calls to arrow that the
UDF makes if that call accepts a memory_pool.
"""
```
Minor nit but it might be nice to give the user some guidance on how to use
this.
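For instance, a registered UDF could thread the pool through like this (an illustrative sketch using the API added in this PR; the function name and doc are made up):
```python
import pyarrow as pa
import pyarrow.compute as pc

def add_one(ctx, array):
    # Forward the caller-supplied pool so the UDF's allocations are
    # accounted to the invoking query rather than the default pool.
    return pc.add(array, 1, memory_pool=ctx.memory_pool)

pc.register_scalar_function(
    add_one, "add_one_pooled",
    {"summary": "add one", "description": "adds one using the caller's pool"},
    {"array": pa.int64()}, pa.int64())
```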
##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,498 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+ import pyarrow.dataset as ds
+except ImportError:
+ ds = None
+
+
+def mock_udf_context(batch_length=10):
+ from pyarrow._compute import _get_scalar_udf_context
+ return _get_scalar_udf_context(pa.default_memory_pool(), batch_length)
+
+
[email protected](scope="session")
+def unary_func_fixture():
+ def unary_function(ctx, scalar1):
+ return pc.call_function("add", [scalar1, 1])
+ func_name = "y=x+k"
+ unary_doc = {"summary": "add function",
+ "description": "test add function"}
+ pc.register_scalar_function(unary_function,
+ func_name,
+ unary_doc,
+ {"array": pa.int64()},
+ pa.int64())
+ return unary_function, func_name
+
+
[email protected](scope="session")
+def binary_func_fixture():
+ def binary_function(ctx, m, x):
+ return pc.call_function("multiply", [m, x])
+ func_name = "y=mx"
+ binary_doc = {"summary": "y=mx",
+ "description": "find y from y = mx"}
+ pc.register_scalar_function(binary_function,
+ func_name,
+ binary_doc,
+ {"m": pa.int64(),
+ "x": pa.int64(),
+ },
+ pa.int64())
+ return binary_function, func_name
+
+
[email protected](scope="session")
+def ternary_func_fixture():
+ def ternary_function(ctx, m, x, c):
+ mx = pc.call_function("multiply", [m, x])
+ return pc.call_function("add", [mx, c])
+ ternary_doc = {"summary": "y=mx+c",
+ "description": "find y from y = mx + c"}
+ func_name = "y=mx+c"
+ pc.register_scalar_function(ternary_function,
+ func_name,
+ ternary_doc,
+ {
+ "array1": pa.int64(),
+ "array2": pa.int64(),
+ "array3": pa.int64(),
+ },
+ pa.int64())
+ return ternary_function, func_name
+
+
[email protected](scope="session")
+def varargs_func_fixture():
+ def varargs_function(ctx, *values):
+ base_val = values[:2]
+ res = pc.call_function("add", base_val)
+ for other_val in values[2:]:
+ res = pc.call_function("add", [res, other_val])
+ return res
+ func_name = "z=ax+by+c"
+ varargs_doc = {"summary": "z=ax+by+c",
+ "description": "find z from z = ax + by + c"
+ }
+ pc.register_scalar_function(varargs_function,
+ func_name,
+ varargs_doc,
+ {
+ "array1": pa.int64(),
+ "array2": pa.int64(),
+ "array3": pa.int64(),
+ "array4": pa.int64(),
+ "array5": pa.int64(),
Review Comment:
If it's varargs do we need to list 5 input types?
##########
python/pyarrow/_compute.pyx:
##########
@@ -2275,3 +2279,207 @@ cdef CExpression _bind(Expression filter, Schema schema) except *:
return GetResultValue(filter.unwrap().Bind(
deref(pyarrow_unwrap_schema(schema).get())))
+
+
+cdef class ScalarUdfContext:
+ """
+ Per-invocation function context/state.
+
+ This object will always be the first argument to a user-defined
+ function. It should not be used outside of a call to the function.
+ """
+
+ def __init__(self):
+ raise TypeError("Do not call {}'s constructor directly"
+ .format(self.__class__.__name__))
+
+ cdef void init(self, const CScalarUdfContext &c_context):
+ self.c_context = c_context
+
+ @property
+ def batch_length(self):
+ """
+ The common length of all input arguments (int).
+
+ In the case that all arguments are scalars, this value
+ is used to pass the "actual length" of the arguments,
+ e.g. because the scalar values are encoding a column
+ with a constant value.
+ """
+ return self.c_context.batch_length
+
+ @property
+ def memory_pool(self):
+ """
+ A memory pool for allocations (:class:`MemoryPool`).
+ """
+ return box_memory_pool(self.c_context.pool)
+
+
+cdef inline CFunctionDoc _make_function_doc(dict func_doc) except *:
+ """
+ Helper function to generate the FunctionDoc
+ This function accepts a dictionary and expect the
+ summary(str), description(str) and arg_names(List[str]) keys.
+ """
+ cdef:
+ CFunctionDoc f_doc
+ vector[c_string] c_arg_names
+
+ f_doc.summary = tobytes(func_doc["summary"])
+ f_doc.description = tobytes(func_doc["description"])
+ for arg_name in func_doc["arg_names"]:
+ c_arg_names.push_back(tobytes(arg_name))
+ f_doc.arg_names = c_arg_names
+ # UDFOptions integration:
+ # TODO: https://issues.apache.org/jira/browse/ARROW-16041
+ f_doc.options_class = b""
+ f_doc.options_required = False
+ return f_doc
+
+
+cdef object box_scalar_udf_context(const CScalarUdfContext& c_context):
+ cdef ScalarUdfContext context = ScalarUdfContext.__new__(ScalarUdfContext)
+ context.init(c_context)
+ return context
+
+
+cdef _scalar_udf_callback(user_function, const CScalarUdfContext& c_context, inputs):
+ """
+ Helper callback function used to wrap the ScalarUdfContext from Python to C++
+ execution.
+ """
+ context = box_scalar_udf_context(c_context)
+ return user_function(context, *inputs)
+
+
+def _get_scalar_udf_context(memory_pool, batch_length):
+ cdef CScalarUdfContext c_context
+ c_context.pool = maybe_unbox_memory_pool(memory_pool)
+ c_context.batch_length = batch_length
+ context = box_scalar_udf_context(c_context)
+ return context
+
+
+def register_scalar_function(func, function_name, function_doc, in_types,
+ out_type):
+ """
+ Register a user-defined scalar function.
+
+ A scalar function is a function that executes elementwise
+ operations on arrays or scalars, i.e. a scalar function must
+ be computed row-by-row with no state where each output row
+ is computed by only from its corresponding input row.
Review Comment:
```suggestion
is computed only from its corresponding input row.
```
##########
python/pyarrow/_compute.pyx:
##########
@@ -2275,3 +2279,207 @@ cdef CExpression _bind(Expression filter, Schema schema) except *:
return GetResultValue(filter.unwrap().Bind(
deref(pyarrow_unwrap_schema(schema).get())))
+
+
+cdef class ScalarUdfContext:
+ """
+ Per-invocation function context/state.
+
+ This object will always be the first argument to a user-defined
+ function. It should not be used outside of a call to the function.
+ """
+
+ def __init__(self):
+ raise TypeError("Do not call {}'s constructor directly"
+ .format(self.__class__.__name__))
+
+ cdef void init(self, const CScalarUdfContext &c_context):
+ self.c_context = c_context
+
+ @property
+ def batch_length(self):
+ """
+ The common length of all input arguments (int).
+
+ In the case that all arguments are scalars, this value
+ is used to pass the "actual length" of the arguments,
+ e.g. because the scalar values are encoding a column
+ with a constant value.
+ """
+ return self.c_context.batch_length
+
+ @property
+ def memory_pool(self):
+ """
+ A memory pool for allocations (:class:`MemoryPool`).
+ """
+ return box_memory_pool(self.c_context.pool)
+
+
+cdef inline CFunctionDoc _make_function_doc(dict func_doc) except *:
+ """
+ Helper function to generate the FunctionDoc
+ This function accepts a dictionary and expect the
Review Comment:
```suggestion
This function accepts a dictionary and expects the
```
##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,498 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+ import pyarrow.dataset as ds
+except ImportError:
+ ds = None
+
+
+def mock_udf_context(batch_length=10):
+ from pyarrow._compute import _get_scalar_udf_context
+ return _get_scalar_udf_context(pa.default_memory_pool(), batch_length)
+
+
[email protected](scope="session")
+def unary_func_fixture():
+ def unary_function(ctx, scalar1):
+ return pc.call_function("add", [scalar1, 1])
+ func_name = "y=x+k"
+ unary_doc = {"summary": "add function",
+ "description": "test add function"}
+ pc.register_scalar_function(unary_function,
+ func_name,
+ unary_doc,
+ {"array": pa.int64()},
+ pa.int64())
+ return unary_function, func_name
+
+
[email protected](scope="session")
+def binary_func_fixture():
+ def binary_function(ctx, m, x):
+ return pc.call_function("multiply", [m, x])
+ func_name = "y=mx"
+ binary_doc = {"summary": "y=mx",
+ "description": "find y from y = mx"}
+ pc.register_scalar_function(binary_function,
+ func_name,
+ binary_doc,
+ {"m": pa.int64(),
+ "x": pa.int64(),
+ },
+ pa.int64())
+ return binary_function, func_name
+
+
[email protected](scope="session")
+def ternary_func_fixture():
+ def ternary_function(ctx, m, x, c):
+ mx = pc.call_function("multiply", [m, x])
+ return pc.call_function("add", [mx, c])
+ ternary_doc = {"summary": "y=mx+c",
+ "description": "find y from y = mx + c"}
+ func_name = "y=mx+c"
+ pc.register_scalar_function(ternary_function,
+ func_name,
+ ternary_doc,
+ {
+ "array1": pa.int64(),
+ "array2": pa.int64(),
+ "array3": pa.int64(),
+ },
+ pa.int64())
+ return ternary_function, func_name
+
+
[email protected](scope="session")
+def varargs_func_fixture():
+ def varargs_function(ctx, *values):
+ base_val = values[:2]
+ res = pc.call_function("add", base_val)
+ for other_val in values[2:]:
+ res = pc.call_function("add", [res, other_val])
+ return res
+ func_name = "z=ax+by+c"
+ varargs_doc = {"summary": "z=ax+by+c",
+ "description": "find z from z = ax + by + c"
+ }
+ pc.register_scalar_function(varargs_function,
+ func_name,
+ varargs_doc,
+ {
+ "array1": pa.int64(),
+ "array2": pa.int64(),
+ "array3": pa.int64(),
+ "array4": pa.int64(),
+ "array5": pa.int64(),
+ },
+ pa.int64())
+ return varargs_function, func_name
+
+
[email protected](scope="session")
+def random_with_udf_ctx_func_fixture():
+ def random_with_udf_ctx(context, one, two):
+ return pc.add(one, two, memory_pool=context.memory_pool)
+
+ in_types = {"one": pa.int64(),
+ "two": pa.int64(),
+ }
+ func_doc = {
+ "summary": "test udf context",
+ "description": "udf context test"
+ }
+ func_name = "test_udf_context"
+ pc.register_scalar_function(random_with_udf_ctx,
+ func_name, func_doc,
+ in_types,
+ pa.int64())
+ return random_with_udf_ctx, func_name
+
+
[email protected](scope="session")
+def output_check_func_fixture():
+ # The objective of this fixture is to evaluate,
+ # how the UDF interface respond to unexpected
Review Comment:
```suggestion
# how the UDF interface responds to unexpected
```
##########
cpp/src/arrow/compute/kernels/scalar_validity.cc:
##########
@@ -260,8 +258,7 @@ std::shared_ptr<ScalarFunction> MakeIsInfFunction(std::string name,
return func;
}
-std::shared_ptr<ScalarFunction> MakeIsNanFunction(std::string name,
- const FunctionDoc* doc) {
+std::shared_ptr<ScalarFunction> MakeIsNanFunction(std::string name, FunctionDoc doc) {
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), doc);
Review Comment:
```suggestion
auto func = std::make_shared<ScalarFunction>(name, Arity::Unary(), std::move(doc));
```
##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,498 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+ import pyarrow.dataset as ds
+except ImportError:
+ ds = None
+
+
+def mock_udf_context(batch_length=10):
+ from pyarrow._compute import _get_scalar_udf_context
+ return _get_scalar_udf_context(pa.default_memory_pool(), batch_length)
+
+
[email protected](scope="session")
+def unary_func_fixture():
+ def unary_function(ctx, scalar1):
+ return pc.call_function("add", [scalar1, 1])
+ func_name = "y=x+k"
Review Comment:
```suggestion
func_name = "y=x+1"
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]