pitrou commented on code in PR #12590:
URL: https://github.com/apache/arrow/pull/12590#discussion_r858752085


##########
cpp/src/arrow/python/udf.cc:
##########
@@ -0,0 +1,131 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "arrow/python/udf.h"
+#include "arrow/compute/function.h"
+#include "arrow/python/common.h"
+
+namespace arrow {
+
+namespace py {
+
+Status CheckOutputType(const DataType& expected, const DataType& actual) {
+  if (!expected.Equals(actual)) {
+    return Status::TypeError("Expected output type, ", expected.name(),
+                             ", but function returned type ", actual.name());
+  }
+  return Status::OK();
+}
+
+struct PythonUdf {
+  ScalarUdfWrapperCallback cb;
+  std::shared_ptr<OwnedRefNoGIL> function;
+  compute::OutputType output_type;
+
+  // function needs to be destroyed at process exit
+  // and Python may no longer be initialized.
+  ~PythonUdf() {
+    if (_Py_IsFinalizing()) {
+      function->detach();
+    }
+  }
+
+  Status operator()(compute::KernelContext* ctx, const compute::ExecBatch& batch,
+                    Datum* out) {
+    return SafeCallIntoPython([=]() -> Status { return Execute(ctx, batch, out); });
+  }
+
+  Status Execute(compute::KernelContext* ctx, const compute::ExecBatch& batch,
+                 Datum* out) {
+    const auto num_args = batch.values.size();
+    ScalarUdfContext udf_context{ctx->memory_pool(), static_cast<int64_t>(batch.length)};
+    PyObject* arg_tuple = PyTuple_New(num_args);
+    for (size_t arg_id = 0; arg_id < num_args; arg_id++) {
+      switch (batch[arg_id].kind()) {
+        case Datum::SCALAR: {
+          auto c_data = batch[arg_id].scalar();
+          PyObject* data = wrap_scalar(c_data);
+          PyTuple_SetItem(arg_tuple, arg_id, data);
+          break;
+        }
+        case Datum::ARRAY: {
+          auto c_data = batch[arg_id].make_array();
+          PyObject* data = wrap_array(c_data);
+          PyTuple_SetItem(arg_tuple, arg_id, data);
+          break;
+        }
+        default:
+          auto datum = batch[arg_id];
+          return Status::NotImplemented(
+              "User-defined-functions are not supported for the datum kind ",
+              ToString(batch[arg_id].kind()));
+      }
+    }
+    PyObject* result;
+    result = cb(function->obj(), udf_context, arg_tuple);
+    RETURN_NOT_OK(CheckPyError());
+    // unwrapping the output for expected output type
+    if (is_scalar(result)) {
+      ARROW_ASSIGN_OR_RAISE(auto val, unwrap_scalar(result));
+      RETURN_NOT_OK(CheckOutputType(*output_type.type(), *val->type));
+      *out = Datum(val);
+      return Status::OK();
+    } else if (is_array(result)) {
+      ARROW_ASSIGN_OR_RAISE(auto val, unwrap_array(result));
+      RETURN_NOT_OK(CheckOutputType(*output_type.type(), *val->type()));
+      *out = Datum(val);
+      return Status::OK();
+    } else {
+      return Status::TypeError("Unexpected output type: ", 
Py_TYPE(result)->tp_name,
+                               " (expected Scalar or Array)");
+    }
+    return Status::OK();
+  }
+};
+
+Status RegisterScalarFunction(PyObject* user_function, ScalarUdfWrapperCallback wrapper,
+                              const ScalarUdfOptions& options) {
+  if (user_function == nullptr) {
+    return Status::Invalid("Python function cannot be null");

Review Comment:
   I don't think this will happen, so probably not worth checking for.



##########
cpp/src/arrow/python/udf.cc:
##########
@@ -0,0 +1,131 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "arrow/python/udf.h"
+#include "arrow/compute/function.h"
+#include "arrow/python/common.h"
+
+namespace arrow {
+
+namespace py {
+
+Status CheckOutputType(const DataType& expected, const DataType& actual) {

Review Comment:
   Please put internal functions and classes in the anonymous namespace. In this module, only `RegisterScalarFunction` needs to be exposed.
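
   Concretely, a declaration-level sketch of what that could look like, reusing the names from this diff (bodies elided):
   ```cpp
   namespace arrow {
   namespace py {
   namespace {

   // Internal helpers lose external linkage and stay private to udf.cc.
   Status CheckOutputType(const DataType& expected, const DataType& actual);
   struct PythonUdf;

   }  // namespace

   // Only the registration entry point remains visible to other modules.
   Status RegisterScalarFunction(PyObject* user_function,
                                 ScalarUdfWrapperCallback wrapper,
                                 const ScalarUdfOptions& options);

   }  // namespace py
   }  // namespace arrow
   ```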



##########
cpp/src/arrow/python/udf.cc:
##########
@@ -0,0 +1,131 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "arrow/python/udf.h"
+#include "arrow/compute/function.h"
+#include "arrow/python/common.h"
+
+namespace arrow {
+
+namespace py {
+
+Status CheckOutputType(const DataType& expected, const DataType& actual) {
+  if (!expected.Equals(actual)) {
+    return Status::TypeError("Expected output type, ", expected.name(),
+                             ", but function returned type ", actual.name());

Review Comment:
   `DataType::name()` only returns the type's base name, which is less helpful on parametric types.
   ```suggestion
       return Status::TypeError("Expected output type ", expected.ToString(),
                                ", but function returned type ", 
actual.ToString());
   ```
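
   As a side illustration (plain pyarrow, not part of the suggestion): two parametric types can share a base name and differ only in their parameters, so an error message built from the base name alone cannot distinguish them:
   ```python
   import pyarrow as pa

   # Both types have the base name "timestamp"; only the full string
   # form shows the unit parameter that tells them apart.
   t1 = pa.timestamp('s')
   t2 = pa.timestamp('ms')
   print(str(t1))  # timestamp[s]
   print(str(t2))  # timestamp[ms]
   ```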



##########
python/pyarrow/_compute.pyx:
##########
@@ -2275,3 +2279,205 @@ cdef CExpression _bind(Expression filter, Schema schema) except *:
 
     return GetResultValue(filter.unwrap().Bind(
         deref(pyarrow_unwrap_schema(schema).get())))
+
+
+cdef class ScalarUdfContext:
+    """
+    Per-invocation function context/state.
+
+    This object will always be the first argument to a user-defined
+    function. It should not be used outside of a call to the function.
+    """
+
+    def __init__(self):
+        raise TypeError("Do not call {}'s constructor directly"
+                        .format(self.__class__.__name__))
+
+    cdef void init(self, const CScalarUdfContext &c_context):
+        self.c_context = c_context
+
+    @property
+    def batch_length(self):
+        """
+        The common length of all input arguments (int).
+
+        In the case that all arguments are scalars, this value
+        is used to pass the "actual length" of the arguments,
+        e.g. because the scalar values are encoding a column
+        with a constant value.
+        """
+        return self.c_context.batch_length
+
+    @property
+    def memory_pool(self):
+        """
+        A memory pool for allocations (:class:`MemoryPool`).
+        """
+        return box_memory_pool(self.c_context.pool)
+
+
+cdef inline CFunctionDoc _make_function_doc(dict func_doc) except *:
+    """
+    Helper function to generate the FunctionDoc
+    This function accepts a dictionary and expect the 
+    summary(str), description(str) and arg_names(List[str]) keys. 
+    """
+    cdef:
+        CFunctionDoc f_doc
+        vector[c_string] c_arg_names
+
+    f_doc.summary = tobytes(func_doc["summary"])
+    f_doc.description = tobytes(func_doc["description"])
+    for arg_name in func_doc["arg_names"]:
+        c_arg_names.push_back(tobytes(arg_name))
+    f_doc.arg_names = c_arg_names
+    # UDFOptions integration:
+    # TODO: https://issues.apache.org/jira/browse/ARROW-16041
+    f_doc.options_class = b""
+    f_doc.options_required = False
+    return f_doc
+
+
+cdef object box_scalar_udf_context(const CScalarUdfContext& c_context):
+    cdef ScalarUdfContext context = ScalarUdfContext.__new__(ScalarUdfContext)
+    context.init(c_context)
+    return context
+
+
+cdef _scalar_udf_callback(user_function, const CScalarUdfContext& c_context, inputs):
+    """
+    Helper callback function used to wrap the ScalarUdfContext from Python to C++
+    execution.
+    """
+    context = box_scalar_udf_context(c_context)
+    return user_function(context, *inputs)
+
+
+def _get_scalar_udf_context(memory_pool, batch_length):
+    cdef CScalarUdfContext c_context
+    c_context.pool = maybe_unbox_memory_pool(memory_pool)
+    c_context.batch_length = batch_length
+    context = box_scalar_udf_context(c_context)
+    return context
+
+
+def register_scalar_function(func, func_name, function_doc, in_types,
+                             out_type):
+    """
+    Register a user-defined scalar function. 
+
+    A scalar function is a function that executes elementwise
+    operations on arrays or scalars, and therefore whose results
+    generally do not depend on the order of the values in the
+    arguments. Accepts and returns arrays that are all of the
+    same size. These functions roughly correspond to the functions
+    used in SQL expressions.
+
+    Parameters
+    ----------
+    func : callable
+        A callable implementing the user-defined function.
+        It must take arguments equal to the number of
+        in_types defined. It must return an Array or Scalar
+        matching the out_type. It must return a Scalar if
+        all arguments are scalar, else it must return an Array.
+
+        To define a varargs function, pass a callable that takes
+        varargs. The last in_type will be the type of the all
+        varargs arguments.
+    func_name : str
+        Name of the function. This name must be globally unique. 
+    function_doc : dict
+        A dictionary object with keys "summary" (str),
+        and "description" (str).

Review Comment:
   Can we be consistent wrt. naming? Either "func_name" and "func_doc", or "function_name" and "function_doc".



##########
python/pyarrow/_compute.pyx:
##########
@@ -2275,3 +2279,205 @@ cdef CExpression _bind(Expression filter, Schema schema) except *:
 
     return GetResultValue(filter.unwrap().Bind(
         deref(pyarrow_unwrap_schema(schema).get())))
+
+
+cdef class ScalarUdfContext:
+    """
+    Per-invocation function context/state.
+
+    This object will always be the first argument to a user-defined
+    function. It should not be used outside of a call to the function.
+    """
+
+    def __init__(self):
+        raise TypeError("Do not call {}'s constructor directly"
+                        .format(self.__class__.__name__))
+
+    cdef void init(self, const CScalarUdfContext &c_context):
+        self.c_context = c_context
+
+    @property
+    def batch_length(self):
+        """
+        The common length of all input arguments (int).
+
+        In the case that all arguments are scalars, this value
+        is used to pass the "actual length" of the arguments,
+        e.g. because the scalar values are encoding a column
+        with a constant value.
+        """
+        return self.c_context.batch_length
+
+    @property
+    def memory_pool(self):
+        """
+        A memory pool for allocations (:class:`MemoryPool`).
+        """
+        return box_memory_pool(self.c_context.pool)
+
+
+cdef inline CFunctionDoc _make_function_doc(dict func_doc) except *:
+    """
+    Helper function to generate the FunctionDoc
+    This function accepts a dictionary and expect the 
+    summary(str), description(str) and arg_names(List[str]) keys. 
+    """
+    cdef:
+        CFunctionDoc f_doc
+        vector[c_string] c_arg_names
+
+    f_doc.summary = tobytes(func_doc["summary"])
+    f_doc.description = tobytes(func_doc["description"])
+    for arg_name in func_doc["arg_names"]:
+        c_arg_names.push_back(tobytes(arg_name))
+    f_doc.arg_names = c_arg_names
+    # UDFOptions integration:
+    # TODO: https://issues.apache.org/jira/browse/ARROW-16041
+    f_doc.options_class = b""
+    f_doc.options_required = False
+    return f_doc
+
+
+cdef object box_scalar_udf_context(const CScalarUdfContext& c_context):
+    cdef ScalarUdfContext context = ScalarUdfContext.__new__(ScalarUdfContext)
+    context.init(c_context)
+    return context
+
+
+cdef _scalar_udf_callback(user_function, const CScalarUdfContext& c_context, inputs):
+    """
+    Helper callback function used to wrap the ScalarUdfContext from Python to C++
+    execution.
+    """
+    context = box_scalar_udf_context(c_context)
+    return user_function(context, *inputs)
+
+
+def _get_scalar_udf_context(memory_pool, batch_length):
+    cdef CScalarUdfContext c_context
+    c_context.pool = maybe_unbox_memory_pool(memory_pool)
+    c_context.batch_length = batch_length
+    context = box_scalar_udf_context(c_context)
+    return context
+
+
+def register_scalar_function(func, func_name, function_doc, in_types,
+                             out_type):
+    """
+    Register a user-defined scalar function. 
+
+    A scalar function is a function that executes elementwise
+    operations on arrays or scalars, and therefore whose results
+    generally do not depend on the order of the values in the
+    arguments. Accepts and returns arrays that are all of the
+    same size. These functions roughly correspond to the functions
+    used in SQL expressions.
+
+    Parameters
+    ----------
+    func : callable
+        A callable implementing the user-defined function.
+        It must take arguments equal to the number of
+        in_types defined. It must return an Array or Scalar
+        matching the out_type. It must return a Scalar if
+        all arguments are scalar, else it must return an Array.
+
+        To define a varargs function, pass a callable that takes
+        varargs. The last in_type will be the type of the all
+        varargs arguments.

Review Comment:
   ```suggestion
           varargs. The last in_type will be the type of all
           varargs arguments.
   ```



##########
python/pyarrow/_compute.pyx:
##########
@@ -2275,3 +2279,205 @@ cdef CExpression _bind(Expression filter, Schema schema) except *:
 
     return GetResultValue(filter.unwrap().Bind(
         deref(pyarrow_unwrap_schema(schema).get())))
+
+
+cdef class ScalarUdfContext:
+    """
+    Per-invocation function context/state.
+
+    This object will always be the first argument to a user-defined
+    function. It should not be used outside of a call to the function.
+    """
+
+    def __init__(self):
+        raise TypeError("Do not call {}'s constructor directly"
+                        .format(self.__class__.__name__))
+
+    cdef void init(self, const CScalarUdfContext &c_context):
+        self.c_context = c_context
+
+    @property
+    def batch_length(self):
+        """
+        The common length of all input arguments (int).
+
+        In the case that all arguments are scalars, this value
+        is used to pass the "actual length" of the arguments,
+        e.g. because the scalar values are encoding a column
+        with a constant value.
+        """
+        return self.c_context.batch_length
+
+    @property
+    def memory_pool(self):
+        """
+        A memory pool for allocations (:class:`MemoryPool`).
+        """
+        return box_memory_pool(self.c_context.pool)
+
+
+cdef inline CFunctionDoc _make_function_doc(dict func_doc) except *:
+    """
+    Helper function to generate the FunctionDoc
+    This function accepts a dictionary and expect the 
+    summary(str), description(str) and arg_names(List[str]) keys. 
+    """
+    cdef:
+        CFunctionDoc f_doc
+        vector[c_string] c_arg_names
+
+    f_doc.summary = tobytes(func_doc["summary"])
+    f_doc.description = tobytes(func_doc["description"])
+    for arg_name in func_doc["arg_names"]:
+        c_arg_names.push_back(tobytes(arg_name))
+    f_doc.arg_names = c_arg_names
+    # UDFOptions integration:
+    # TODO: https://issues.apache.org/jira/browse/ARROW-16041
+    f_doc.options_class = b""
+    f_doc.options_required = False
+    return f_doc
+
+
+cdef object box_scalar_udf_context(const CScalarUdfContext& c_context):
+    cdef ScalarUdfContext context = ScalarUdfContext.__new__(ScalarUdfContext)
+    context.init(c_context)
+    return context
+
+
+cdef _scalar_udf_callback(user_function, const CScalarUdfContext& c_context, inputs):
+    """
+    Helper callback function used to wrap the ScalarUdfContext from Python to C++
+    execution.
+    """
+    context = box_scalar_udf_context(c_context)
+    return user_function(context, *inputs)
+
+
+def _get_scalar_udf_context(memory_pool, batch_length):
+    cdef CScalarUdfContext c_context
+    c_context.pool = maybe_unbox_memory_pool(memory_pool)
+    c_context.batch_length = batch_length
+    context = box_scalar_udf_context(c_context)
+    return context
+
+
+def register_scalar_function(func, func_name, function_doc, in_types,
+                             out_type):
+    """
+    Register a user-defined scalar function. 
+
+    A scalar function is a function that executes elementwise
+    operations on arrays or scalars, and therefore whose results
+    generally do not depend on the order of the values in the
+    arguments. Accepts and returns arrays that are all of the
+    same size. These functions roughly correspond to the functions
+    used in SQL expressions.
+
+    Parameters
+    ----------
+    func : callable
+        A callable implementing the user-defined function.
+        It must take arguments equal to the number of
+        in_types defined. It must return an Array or Scalar
+        matching the out_type. It must return a Scalar if
+        all arguments are scalar, else it must return an Array.
+
+        To define a varargs function, pass a callable that takes
+        varargs. The last in_type will be the type of the all
+        varargs arguments.
+    func_name : str
+        Name of the function. This name must be globally unique. 
+    function_doc : dict
+        A dictionary object with keys "summary" (str),
+        and "description" (str).
+    in_types : Dict[str, DataType]
+        A dictionarym mapping function argument names to
+        their respective DataType.
+        The argument names will be used to generate
+        documentation for the function. The number of
+        arguments specified here determines the function
+        arity.
+    out_type : DataType
+        Output type of the function.
+
+    Examples
+    --------
+
+    >>> import pyarrow.compute as pc
+    >>> 
+    >>> func_doc = {}
+    >>> func_doc["summary"] = "simple udf"
+    >>> func_doc["description"] = "add a constant to a scalar"
+    >>> 
+    >>> def add_constant(ctx, array):
+    ...     return pc.add(array, 1)
+    >>> 
+    >>> func_name = "py_add_func"
+    >>> in_types = {"array": pa.int64()}
+    >>> out_type = pa.int64()
+    >>> pc.register_scalar_function(add_constant, func_name, func_doc,
+    ...                   in_types, out_type)
+    >>> 
+    >>> func = pc.get_function(func_name)
+    >>> func.name
+    'py_add_func'
+    >>> answer = pc.call_function(func_name, [pa.array([20])])
+    >>> answer
+    <pyarrow.lib.Int64Array object at 0x10c22e700>
+    [
+    21
+    ]
+    """
+    cdef:
+        c_string c_func_name
+        CArity c_arity
+        CFunctionDoc c_func_doc
+        vector[shared_ptr[CDataType]] c_in_types
+        PyObject* c_function
+        shared_ptr[CDataType] c_out_type
+        CScalarUdfOptions c_options
+
+    c_func_name = tobytes(func_name)
+
+    if callable(func):
+        c_function = <PyObject*>func
+    else:
+        raise TypeError("func must be a callable")
+
+    func_spec = inspect.getfullargspec(func)
+    num_args = -1
+    if isinstance(in_types, dict):
+        for in_type in in_types.values():
+            if isinstance(in_type, DataType):
+                c_in_types.push_back(pyarrow_unwrap_data_type(in_type))
+            else:
+                raise TypeError("in_types must be of type DataType")
+        function_doc["arg_names"] = in_types.keys()
+        num_args = len(in_types)
+    else:
+        raise TypeError(
+            "in_types must be a dictionary of DataType")
+
+    c_arity = CArity(num_args, func_spec.varargs)
+
+    if "summary" not in function_doc.keys():
+        raise ValueError("Function doc must contain a summary")
+
+    if "description" not in function_doc.keys():
+        raise ValueError("Function doc must contain a description")
+
+    if "arg_names" not in function_doc.keys():
+        raise ValueError("Function doc must contain arg_names")

Review Comment:
   Nit: no need to call `.keys`
   ```suggestion
       if "summary" not in function_doc:
           raise ValueError("Function doc must contain a summary")
   
       if "description" not in function_doc:
           raise ValueError("Function doc must contain a description")
   
       if "arg_names" not in function_doc:
           raise ValueError("Function doc must contain arg_names")
   ```



##########
python/pyarrow/includes/libarrow.pxd:
##########
@@ -2334,6 +2338,7 @@ cdef extern from "arrow/compute/api.h" namespace "arrow::compute" nogil:
 
         DatumType kind() const
         c_string ToString() const
+        c_string ToString(DatumType kind)

Review Comment:
   Hmm, this is not a `Datum` method, you should move this out of the `cppclass` declaration.
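
   I.e., keep it inside the same `cdef extern` block but at its top level, outside the `cppclass` (a sketch; the exact spelling of the enum type in `libarrow.pxd` may need adjusting):
   ```
   cdef extern from "arrow/compute/api.h" namespace "arrow::compute" nogil:
       c_string ToString(DatumType kind)
   ```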



##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,483 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+import random
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+    import pyarrow.dataset as ds
+except ImportError:
+    ds = None
+
+
[email protected](scope="session")
+def mock_udf_context():
+    # The batch_length is 1 for nullary functions.
+    # batch_length=1 since the udf expected output
+    # is generated by calling the udf function external
+    # to the call_function framework.
+    from pyarrow._compute import _get_scalar_udf_context
+    return _get_scalar_udf_context(pa.default_memory_pool(), 1)

Review Comment:
   A length of `1` is wrong for most inputs, isn't it?
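
   A minimal sketch of deriving the length from the actual inputs instead of hard-coding `1`, using the private `_get_scalar_udf_context` helper from this PR (illustrative only; `udf_context_for` is a hypothetical name):
   ```python
   import pyarrow as pa
   from pyarrow._compute import _get_scalar_udf_context

   def udf_context_for(inputs):
       # Array inputs determine the batch length; if every input is a
       # scalar, fall back to a length of 1.
       lengths = [len(arg) for arg in inputs if isinstance(arg, pa.Array)]
       batch_length = lengths[0] if lengths else 1
       return _get_scalar_udf_context(pa.default_memory_pool(), batch_length)
   ```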



##########
python/pyarrow/_compute.pyx:
##########
@@ -2275,3 +2279,205 @@ cdef CExpression _bind(Expression filter, Schema schema) except *:
 
     return GetResultValue(filter.unwrap().Bind(
         deref(pyarrow_unwrap_schema(schema).get())))
+
+
+cdef class ScalarUdfContext:
+    """
+    Per-invocation function context/state.
+
+    This object will always be the first argument to a user-defined
+    function. It should not be used outside of a call to the function.
+    """
+
+    def __init__(self):
+        raise TypeError("Do not call {}'s constructor directly"
+                        .format(self.__class__.__name__))
+
+    cdef void init(self, const CScalarUdfContext &c_context):
+        self.c_context = c_context
+
+    @property
+    def batch_length(self):
+        """
+        The common length of all input arguments (int).
+
+        In the case that all arguments are scalars, this value
+        is used to pass the "actual length" of the arguments,
+        e.g. because the scalar values are encoding a column
+        with a constant value.
+        """
+        return self.c_context.batch_length
+
+    @property
+    def memory_pool(self):
+        """
+        A memory pool for allocations (:class:`MemoryPool`).
+        """
+        return box_memory_pool(self.c_context.pool)
+
+
+cdef inline CFunctionDoc _make_function_doc(dict func_doc) except *:
+    """
+    Helper function to generate the FunctionDoc
+    This function accepts a dictionary and expect the 
+    summary(str), description(str) and arg_names(List[str]) keys. 
+    """
+    cdef:
+        CFunctionDoc f_doc
+        vector[c_string] c_arg_names
+
+    f_doc.summary = tobytes(func_doc["summary"])
+    f_doc.description = tobytes(func_doc["description"])
+    for arg_name in func_doc["arg_names"]:
+        c_arg_names.push_back(tobytes(arg_name))
+    f_doc.arg_names = c_arg_names
+    # UDFOptions integration:
+    # TODO: https://issues.apache.org/jira/browse/ARROW-16041
+    f_doc.options_class = b""
+    f_doc.options_required = False
+    return f_doc
+
+
+cdef object box_scalar_udf_context(const CScalarUdfContext& c_context):
+    cdef ScalarUdfContext context = ScalarUdfContext.__new__(ScalarUdfContext)
+    context.init(c_context)
+    return context
+
+
+cdef _scalar_udf_callback(user_function, const CScalarUdfContext& c_context, inputs):
+    """
+    Helper callback function used to wrap the ScalarUdfContext from Python to C++
+    execution.
+    """
+    context = box_scalar_udf_context(c_context)
+    return user_function(context, *inputs)
+
+
+def _get_scalar_udf_context(memory_pool, batch_length):
+    cdef CScalarUdfContext c_context
+    c_context.pool = maybe_unbox_memory_pool(memory_pool)
+    c_context.batch_length = batch_length
+    context = box_scalar_udf_context(c_context)
+    return context
+
+
+def register_scalar_function(func, func_name, function_doc, in_types,
+                             out_type):
+    """
+    Register a user-defined scalar function. 
+
+    A scalar function is a function that executes elementwise
+    operations on arrays or scalars, and therefore whose results
+    generally do not depend on the order of the values in the
+    arguments. Accepts and returns arrays that are all of the
+    same size. These functions roughly correspond to the functions
+    used in SQL expressions.
+
+    Parameters
+    ----------
+    func : callable
+        A callable implementing the user-defined function.
+        It must take arguments equal to the number of
+        in_types defined. It must return an Array or Scalar
+        matching the out_type. It must return a Scalar if
+        all arguments are scalar, else it must return an Array.
+
+        To define a varargs function, pass a callable that takes
+        varargs. The last in_type will be the type of the all
+        varargs arguments.
+    func_name : str
+        Name of the function. This name must be globally unique. 
+    function_doc : dict
+        A dictionary object with keys "summary" (str),
+        and "description" (str).
+    in_types : Dict[str, DataType]
+        A dictionarym mapping function argument names to
+        their respective DataType.
+        The argument names will be used to generate
+        documentation for the function. The number of
+        arguments specified here determines the function
+        arity.
+    out_type : DataType
+        Output type of the function.
+
+    Examples
+    --------
+
+    >>> import pyarrow.compute as pc
+    >>> 
+    >>> func_doc = {}
+    >>> func_doc["summary"] = "simple udf"
+    >>> func_doc["description"] = "add a constant to a scalar"
+    >>> 
+    >>> def add_constant(ctx, array):
+    ...     return pc.add(array, 1)
+    >>> 
+    >>> func_name = "py_add_func"
+    >>> in_types = {"array": pa.int64()}
+    >>> out_type = pa.int64()
+    >>> pc.register_scalar_function(add_constant, func_name, func_doc,
+    ...                   in_types, out_type)
+    >>> 
+    >>> func = pc.get_function(func_name)
+    >>> func.name
+    'py_add_func'
+    >>> answer = pc.call_function(func_name, [pa.array([20])])
+    >>> answer
+    <pyarrow.lib.Int64Array object at 0x10c22e700>
+    [
+    21
+    ]
+    """
+    cdef:
+        c_string c_func_name
+        CArity c_arity
+        CFunctionDoc c_func_doc
+        vector[shared_ptr[CDataType]] c_in_types
+        PyObject* c_function
+        shared_ptr[CDataType] c_out_type
+        CScalarUdfOptions c_options
+
+    c_func_name = tobytes(func_name)
+
+    if callable(func):
+        c_function = <PyObject*>func
+    else:
+        raise TypeError("func must be a callable")
+
+    func_spec = inspect.getfullargspec(func)
+    num_args = -1
+    if isinstance(in_types, dict):
+        for in_type in in_types.values():
+            if isinstance(in_type, DataType):

Review Comment:
   Why are we not using `ensure_type` here?
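
   For reference, a sketch of the loop using `ensure_type` (assuming the internal helper from `types.pxi`, which raises `TypeError` for anything that is not a `DataType`):
   ```python
   for in_type in in_types.values():
       # ensure_type validates and raises TypeError on bad input,
       # replacing the manual isinstance check.
       c_in_types.push_back(pyarrow_unwrap_data_type(ensure_type(in_type)))
   ```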



##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,483 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+import random
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+    import pyarrow.dataset as ds
+except ImportError:
+    ds = None
+
+
[email protected](scope="session")
+def mock_udf_context():
+    # The batch_length is 1 for nullary functions.
+    # batch_length=1 since the udf expected output
+    # is generated by calling the udf function external
+    # to the call_function framework.
+    from pyarrow._compute import _get_scalar_udf_context
+    return _get_scalar_udf_context(pa.default_memory_pool(), 1)
+
+
[email protected](scope="session")
+def unary_func_fixture():
+    def unary_function(ctx, scalar1):
+        return pc.call_function("add", [scalar1, 1])
+    func_name = "y=x+k"
+    unary_doc = {"summary": "add function",
+                 "description": "test add function"}
+    pc.register_scalar_function(unary_function,
+                                func_name,
+                                unary_doc,
+                                {"array": pa.int64()},
+                                pa.int64())
+    return unary_function, func_name
+
+
[email protected](scope="session")
+def binary_func_fixture():
+    def binary_function(ctx, m, x):
+        return pc.call_function("multiply", [m, x])
+    func_name = "y=mx"
+    binary_doc = {"summary": "y=mx",
+                  "description": "find y from y = mx"}
+    pc.register_scalar_function(binary_function,
+                                func_name,
+                                binary_doc,
+                                {"m": pa.int64(),
+                                 "x": pa.int64(),
+                                 },
+                                pa.int64())
+    return binary_function, func_name
+
+
[email protected](scope="session")
+def ternary_func_fixture():
+    def ternary_function(ctx, m, x, c):
+        mx = pc.call_function("multiply", [m, x])
+        return pc.call_function("add", [mx, c])
+    ternary_doc = {"summary": "y=mx+c",
+                   "description": "find y from y = mx + c"}
+    func_name = "y=mx+c"
+    pc.register_scalar_function(ternary_function,
+                                func_name,
+                                ternary_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                },
+                                pa.int64())
+    return ternary_function, func_name
+
+
[email protected](scope="session")
+def varargs_func_fixture():
+    def varargs_function(ctx, *values):
+        base_val = values[:2]
+        res = pc.call_function("add", base_val)
+        for other_val in values[2:]:
+            res = pc.call_function("add", [res, other_val])
+        return res
+    func_name = "z=ax+by+c"
+    varargs_doc = {"summary": "z=ax+by+c",
+                   "description": "find z from z = ax + by + c"
+                   }
+    pc.register_scalar_function(varargs_function,
+                                func_name,
+                                varargs_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                    "array4": pa.int64(),
+                                    "array5": pa.int64(),
+                                },
+                                pa.int64())
+    return varargs_function, func_name
+
+
[email protected](scope="session")
+def random_with_udf_ctx_func_fixture():
+    def random_with_udf_ctx(context, one, two):
+        proxy_pool = pa.proxy_memory_pool(context.memory_pool)
+        ans = pc.add(one, two, memory_pool=proxy_pool)
+        res = pa.array([ans.as_py()], memory_pool=proxy_pool)
+        return res
+    in_types = {"one": pa.int64(),
+                "two": pa.int64(),
+                }
+    func_doc = {
+        "summary": "test udf context",
+        "description": "udf context test"
+    }
+    func_name = "test_udf_context"
+    pc.register_scalar_function(random_with_udf_ctx,
+                                func_name, func_doc,
+                                in_types,
+                                pa.int64())
+    return random_with_udf_ctx, func_name
+
+
+def const_return(ctx, scalar):
+    return 42
+
+
[email protected](scope="session")
+def output_check_func_fixture():

Review Comment:
   Can you add a comment explaining what the point of this example is?
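
   For instance, a comment along these lines at the top of the fixture (one possible reading of its intent):
   ```python
   # Registers a UDF whose declared out_type (int64) differs from what the
   # function actually returns (an int32 array), to check that the
   # mismatch is reported at call time.
   ```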



##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,483 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+import random
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+    import pyarrow.dataset as ds
+except ImportError:
+    ds = None
+
+
[email protected](scope="session")
+def mock_udf_context():
+    # The batch_length is 1 for nullary functions.
+    # batch_length=1 since the udf expected output
+    # is generated by calling the udf function external
+    # to the call_function framework.
+    from pyarrow._compute import _get_scalar_udf_context
+    return _get_scalar_udf_context(pa.default_memory_pool(), 1)
+
+
[email protected](scope="session")
+def unary_func_fixture():
+    def unary_function(ctx, scalar1):
+        return pc.call_function("add", [scalar1, 1])
+    func_name = "y=x+k"
+    unary_doc = {"summary": "add function",
+                 "description": "test add function"}
+    pc.register_scalar_function(unary_function,
+                                func_name,
+                                unary_doc,
+                                {"array": pa.int64()},
+                                pa.int64())
+    return unary_function, func_name
+
+
[email protected](scope="session")
+def binary_func_fixture():
+    def binary_function(ctx, m, x):
+        return pc.call_function("multiply", [m, x])
+    func_name = "y=mx"
+    binary_doc = {"summary": "y=mx",
+                  "description": "find y from y = mx"}
+    pc.register_scalar_function(binary_function,
+                                func_name,
+                                binary_doc,
+                                {"m": pa.int64(),
+                                 "x": pa.int64(),
+                                 },
+                                pa.int64())
+    return binary_function, func_name
+
+
[email protected](scope="session")
+def ternary_func_fixture():
+    def ternary_function(ctx, m, x, c):
+        mx = pc.call_function("multiply", [m, x])
+        return pc.call_function("add", [mx, c])
+    ternary_doc = {"summary": "y=mx+c",
+                   "description": "find y from y = mx + c"}
+    func_name = "y=mx+c"
+    pc.register_scalar_function(ternary_function,
+                                func_name,
+                                ternary_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                },
+                                pa.int64())
+    return ternary_function, func_name
+
+
[email protected](scope="session")
+def varargs_func_fixture():
+    def varargs_function(ctx, *values):
+        base_val = values[:2]
+        res = pc.call_function("add", base_val)
+        for other_val in values[2:]:
+            res = pc.call_function("add", [res, other_val])
+        return res
+    func_name = "z=ax+by+c"
+    varargs_doc = {"summary": "z=ax+by+c",
+                   "description": "find z from z = ax + by + c"
+                   }
+    pc.register_scalar_function(varargs_function,
+                                func_name,
+                                varargs_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                    "array4": pa.int64(),
+                                    "array5": pa.int64(),
+                                },
+                                pa.int64())
+    return varargs_function, func_name
+
+
[email protected](scope="session")
+def random_with_udf_ctx_func_fixture():
+    def random_with_udf_ctx(context, one, two):
+        proxy_pool = pa.proxy_memory_pool(context.memory_pool)
+        ans = pc.add(one, two, memory_pool=proxy_pool)
+        res = pa.array([ans.as_py()], memory_pool=proxy_pool)
+        return res
+    in_types = {"one": pa.int64(),
+                "two": pa.int64(),
+                }
+    func_doc = {
+        "summary": "test udf context",
+        "description": "udf context test"
+    }
+    func_name = "test_udf_context"
+    pc.register_scalar_function(random_with_udf_ctx,
+                                func_name, func_doc,
+                                in_types,
+                                pa.int64())
+    return random_with_udf_ctx, func_name
+
+
+def const_return(ctx, scalar):

Review Comment:
   Can you keep a consistent convention of always defining the function inside the corresponding fixture?
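
   For example, `const_return` could move into `output_type_func_fixture` (shown in a later hunk), along the lines of:
   ```python
   @pytest.fixture(scope="session")
   def output_type_func_fixture():
       # Define the function inside the fixture instead of at module level.
       def const_return(ctx, scalar):
           return 42

       func_name = "test_output_type"
       in_types = {"array": pa.int64()}
       out_type = pa.int64()
       doc = {
           "summary": "add function scalar",
           "description": "add function"
       }
       pc.register_scalar_function(const_return, func_name, doc,
                                   in_types, out_type)
       return const_return, func_name
   ```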



##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,483 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+import random
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+    import pyarrow.dataset as ds
+except ImportError:
+    ds = None
+
+
[email protected](scope="session")
+def mock_udf_context():
+    # The batch_length is 1 for nullary functions.
+    # batch_length=1 since the udf expected output
+    # is generated by calling the udf function external
+    # to the call_function framework.
+    from pyarrow._compute import _get_scalar_udf_context
+    return _get_scalar_udf_context(pa.default_memory_pool(), 1)
+
+
[email protected](scope="session")
+def unary_func_fixture():
+    def unary_function(ctx, scalar1):
+        return pc.call_function("add", [scalar1, 1])
+    func_name = "y=x+k"
+    unary_doc = {"summary": "add function",
+                 "description": "test add function"}
+    pc.register_scalar_function(unary_function,
+                                func_name,
+                                unary_doc,
+                                {"array": pa.int64()},
+                                pa.int64())
+    return unary_function, func_name
+
+
[email protected](scope="session")
+def binary_func_fixture():
+    def binary_function(ctx, m, x):
+        return pc.call_function("multiply", [m, x])
+    func_name = "y=mx"
+    binary_doc = {"summary": "y=mx",
+                  "description": "find y from y = mx"}
+    pc.register_scalar_function(binary_function,
+                                func_name,
+                                binary_doc,
+                                {"m": pa.int64(),
+                                 "x": pa.int64(),
+                                 },
+                                pa.int64())
+    return binary_function, func_name
+
+
[email protected](scope="session")
+def ternary_func_fixture():
+    def ternary_function(ctx, m, x, c):
+        mx = pc.call_function("multiply", [m, x])
+        return pc.call_function("add", [mx, c])
+    ternary_doc = {"summary": "y=mx+c",
+                   "description": "find y from y = mx + c"}
+    func_name = "y=mx+c"
+    pc.register_scalar_function(ternary_function,
+                                func_name,
+                                ternary_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                },
+                                pa.int64())
+    return ternary_function, func_name
+
+
[email protected](scope="session")
+def varargs_func_fixture():
+    def varargs_function(ctx, *values):
+        base_val = values[:2]
+        res = pc.call_function("add", base_val)
+        for other_val in values[2:]:
+            res = pc.call_function("add", [res, other_val])
+        return res
+    func_name = "z=ax+by+c"
+    varargs_doc = {"summary": "z=ax+by+c",
+                   "description": "find z from z = ax + by + c"
+                   }
+    pc.register_scalar_function(varargs_function,
+                                func_name,
+                                varargs_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                    "array4": pa.int64(),
+                                    "array5": pa.int64(),
+                                },
+                                pa.int64())
+    return varargs_function, func_name
+
+
[email protected](scope="session")
+def random_with_udf_ctx_func_fixture():
+    def random_with_udf_ctx(context, one, two):
+        proxy_pool = pa.proxy_memory_pool(context.memory_pool)
+        ans = pc.add(one, two, memory_pool=proxy_pool)
+        res = pa.array([ans.as_py()], memory_pool=proxy_pool)
+        return res
+    in_types = {"one": pa.int64(),
+                "two": pa.int64(),
+                }
+    func_doc = {
+        "summary": "test udf context",
+        "description": "udf context test"
+    }
+    func_name = "test_udf_context"
+    pc.register_scalar_function(random_with_udf_ctx,
+                                func_name, func_doc,
+                                in_types,
+                                pa.int64())
+    return random_with_udf_ctx, func_name
+
+
+def const_return(ctx, scalar):
+    return 42
+
+
[email protected](scope="session")
+def output_check_func_fixture():
+    def output_check(ctx, array):
+        ar = pc.call_function("add", [array, 1])
+        ar = ar.cast(pa.int32())
+        return ar
+    func_name = "test_output_value"
+    in_types = {"array": pa.int64()}
+    out_type = pa.int64()
+    doc = {
+        "summary": "add function scalar",
+        "description": "add function"
+    }
+    pc.register_scalar_function(output_check, func_name, doc,
+                                in_types, out_type)
+    return output_check, func_name
+
+
[email protected](scope="session")
+def nullary_check_func_fixture():
+    # this needs to return array values
+    def nullary_check(ctx):
+        rand_vals = []
+        print("batch_length: ", ctx.batch_length)
+        for _ in range(ctx.batch_length):
+            random.seed(10)
+            rand_vals.append(random.randint(0, 10))
+        return pa.array(rand_vals)
+
+    func_doc = {
+        "summary": "random function",
+        "description": "generates a random value"
+    }
+    func_name = "test_random_func"
+    pc.register_scalar_function(nullary_check,
+                                func_name,
+                                func_doc,
+                                {},
+                                pa.int64())
+
+    return nullary_check, func_name
+
+
+def add_const(ctx, scalar):
+    return pc.call_function("add", [scalar, 1])
+
+
[email protected](scope="session")
+def output_type_func_fixture():

Review Comment:
   Can you add a comment explaining what the point of this fixture is?
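
   For instance (one possible reading of its intent):
   ```python
   # Registers a UDF declared as returning int64 but implemented to return
   # a plain Python int (42), to check that non-Arrow return values are
   # rejected.
   ```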



##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,483 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+import random
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+    import pyarrow.dataset as ds
+except ImportError:
+    ds = None
+
+
[email protected](scope="session")
+def mock_udf_context():
+    # The batch_length is 1 for nullary functions.
+    # batch_length=1 since the udf expected output
+    # is generated by calling the udf function external
+    # to the call_function framework.
+    from pyarrow._compute import _get_scalar_udf_context
+    return _get_scalar_udf_context(pa.default_memory_pool(), 1)
+
+
[email protected](scope="session")
+def unary_func_fixture():
+    def unary_function(ctx, scalar1):
+        return pc.call_function("add", [scalar1, 1])
+    func_name = "y=x+k"
+    unary_doc = {"summary": "add function",
+                 "description": "test add function"}
+    pc.register_scalar_function(unary_function,
+                                func_name,
+                                unary_doc,
+                                {"array": pa.int64()},
+                                pa.int64())
+    return unary_function, func_name
+
+
[email protected](scope="session")
+def binary_func_fixture():
+    def binary_function(ctx, m, x):
+        return pc.call_function("multiply", [m, x])
+    func_name = "y=mx"
+    binary_doc = {"summary": "y=mx",
+                  "description": "find y from y = mx"}
+    pc.register_scalar_function(binary_function,
+                                func_name,
+                                binary_doc,
+                                {"m": pa.int64(),
+                                 "x": pa.int64(),
+                                 },
+                                pa.int64())
+    return binary_function, func_name
+
+
[email protected](scope="session")
+def ternary_func_fixture():
+    def ternary_function(ctx, m, x, c):
+        mx = pc.call_function("multiply", [m, x])
+        return pc.call_function("add", [mx, c])
+    ternary_doc = {"summary": "y=mx+c",
+                   "description": "find y from y = mx + c"}
+    func_name = "y=mx+c"
+    pc.register_scalar_function(ternary_function,
+                                func_name,
+                                ternary_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                },
+                                pa.int64())
+    return ternary_function, func_name
+
+
[email protected](scope="session")
+def varargs_func_fixture():
+    def varargs_function(ctx, *values):
+        base_val = values[:2]
+        res = pc.call_function("add", base_val)
+        for other_val in values[2:]:
+            res = pc.call_function("add", [res, other_val])
+        return res
+    func_name = "z=ax+by+c"
+    varargs_doc = {"summary": "z=ax+by+c",
+                   "description": "find z from z = ax + by + c"
+                   }
+    pc.register_scalar_function(varargs_function,
+                                func_name,
+                                varargs_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                    "array4": pa.int64(),
+                                    "array5": pa.int64(),
+                                },
+                                pa.int64())
+    return varargs_function, func_name
+
+
[email protected](scope="session")
+def random_with_udf_ctx_func_fixture():
+    def random_with_udf_ctx(context, one, two):
+        proxy_pool = pa.proxy_memory_pool(context.memory_pool)
+        ans = pc.add(one, two, memory_pool=proxy_pool)
+        res = pa.array([ans.as_py()], memory_pool=proxy_pool)
+        return res
+    in_types = {"one": pa.int64(),
+                "two": pa.int64(),
+                }
+    func_doc = {
+        "summary": "test udf context",
+        "description": "udf context test"
+    }
+    func_name = "test_udf_context"
+    pc.register_scalar_function(random_with_udf_ctx,
+                                func_name, func_doc,
+                                in_types,
+                                pa.int64())
+    return random_with_udf_ctx, func_name
+
+
+def const_return(ctx, scalar):
+    return 42
+
+
[email protected](scope="session")
+def output_check_func_fixture():
+    def output_check(ctx, array):
+        ar = pc.call_function("add", [array, 1])
+        ar = ar.cast(pa.int32())
+        return ar
+    func_name = "test_output_value"
+    in_types = {"array": pa.int64()}
+    out_type = pa.int64()
+    doc = {
+        "summary": "add function scalar",
+        "description": "add function"
+    }
+    pc.register_scalar_function(output_check, func_name, doc,
+                                in_types, out_type)
+    return output_check, func_name
+
+
[email protected](scope="session")
+def nullary_check_func_fixture():
+    # this needs to return array values
+    def nullary_check(ctx):
+        rand_vals = []
+        print("batch_length: ", ctx.batch_length)
+        for _ in range(ctx.batch_length):
+            random.seed(10)
+            rand_vals.append(random.randint(0, 10))
+        return pa.array(rand_vals)
+
+    func_doc = {
+        "summary": "random function",
+        "description": "generates a random value"
+    }
+    func_name = "test_random_func"
+    pc.register_scalar_function(nullary_check,
+                                func_name,
+                                func_doc,
+                                {},
+                                pa.int64())
+
+    return nullary_check, func_name
+
+
+def add_const(ctx, scalar):
+    return pc.call_function("add", [scalar, 1])
+
+
[email protected](scope="session")
+def output_type_func_fixture():
+    func_name = "test_output_type"
+    in_types = {"array": pa.int64()}
+    out_type = pa.int64()
+    doc = {
+        "summary": "add function scalar",
+        "description": "add function"
+    }
+    pc.register_scalar_function(const_return, func_name, doc,
+                                in_types, out_type)
+    return const_return, func_name
+
+
[email protected](scope="session")
+def varargs_check_func_fixture():
+    def varargs_check(ctx, *values):
+        base_val = values[:2]
+        res = pc.call_function("add", base_val)
+        for other_val in values[2:]:
+            res = pc.call_function("add", [res, other_val])
+        return res
+    func_name = "test_varargs_function"
+    in_types = {"array1": pa.int64(),
+                "array2": pa.int64(),
+                }
+    doc = {"summary": "n add function",
+           "description": "add N number of arrays"
+           }
+    pc.register_scalar_function(varargs_check, func_name, doc,
+                                in_types, pa.int64())
+
+    return varargs_check, func_name
+
+
[email protected](scope="session")
+def raise_func_fixture():
+    def raise_func(ctx):
+        raise ValueError("Test function with raise")
+    func_name = "test_raise"
+    doc = {
+        "summary": "test function with raise",
+        "description": "function with a raise"
+    }
+    pc.register_scalar_function(raise_func, func_name, doc,
+                                {}, pa.int64())
+    return raise_func, func_name
+
+
+def check_scalar_function(func_fixture,
+                          input,
+                          mock_udf_context,
+                          run_in_dataset=True):
+    function, name = func_fixture
+    expected_output = function(mock_udf_context, *input)
+    func = pc.get_function(name)
+    assert func.name == name
+
+    result = pc.call_function(name, input)
+
+    assert result == expected_output
+    if run_in_dataset:
+        field_names = [f'field{index}' for index, in_arr in enumerate(input)]
+        table = pa.Table.from_arrays(input, field_names)
+        dataset = ds.dataset(table)
+        func_args = [ds.field(field_name) for field_name in field_names]
+        result_table = dataset.to_table(
+            columns={'result': ds.field('')._call(name, func_args)})
+        assert result_table.column(0).chunks[0] == expected_output
+
+
+def test_scalar_udf_array_unary(unary_func_fixture, mock_udf_context):
+    check_scalar_function(unary_func_fixture,
+                          [
+                              pa.array([10, 20], pa.int64())
+                          ],
+                          mock_udf_context
+                          )
+
+
+def test_scalar_udf_array_binary(binary_func_fixture, mock_udf_context):
+    check_scalar_function(binary_func_fixture,
+                          [
+                              pa.array([10, 20], pa.int64()),
+                              pa.array([2, 4], pa.int64())
+                          ],
+                          mock_udf_context
+                          )
+
+
+def test_scalar_udf_array_ternary(ternary_func_fixture, mock_udf_context):
+    check_scalar_function(ternary_func_fixture,
+                          [
+                              pa.array([10, 20], pa.int64()),
+                              pa.array([2, 4], pa.int64()),
+                              pa.array([5, 10], pa.int64())
+                          ],
+                          mock_udf_context
+                          )
+
+
+def test_scalar_udf_array_varargs(varargs_func_fixture, mock_udf_context):
+    check_scalar_function(varargs_func_fixture,
+                          [
+                              pa.array([2, 3], pa.int64()),
+                              pa.array([10, 20], pa.int64()),
+                              pa.array([3, 7], pa.int64()),
+                              pa.array([20, 30], pa.int64()),
+                              pa.array([5, 10], pa.int64())
+                          ],
+                          mock_udf_context
+                          )
+
+
+def test_udf_input():
+    # validate function name
+    doc = {
+        "summary": "test udf input",
+        "description": "parameters are validated"
+    }
+    in_types = {"scalar": pa.int64()}
+    out_type = pa.int64()
+    with pytest.raises(TypeError):
+        pc.register_scalar_function(add_const,
+                                    None, doc, in_types,
+                                    out_type)
+
+    # validate function
+    with pytest.raises(TypeError, match="func must be a callable"):
+        pc.register_scalar_function(None, "test_none_function", doc, in_types,
+                                    out_type)
+
+    # validate output type
+    expected_expr = "DataType expected, got <class 'NoneType'>"
+    with pytest.raises(TypeError, match=expected_expr):
+        pc.register_scalar_function(add_const,
+                                    "test_output_function", doc, in_types,
+                                    None)
+
+    # validate input type
+    expected_expr = r'in_types must be a dictionary of DataType'
+    with pytest.raises(TypeError, match=expected_expr):
+        pc.register_scalar_function(add_const,
+                                    "test_input_function", doc, None,
+                                    out_type)

Review Comment:
   What happens if one tries to register a function with an already existing name?
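
   For instance, a hypothetical follow-up test could pin the behavior down
   (reusing `add_const` and the imports already in this module, and assuming
   duplicate registration raises an error such as `KeyError` rather than
   silently overwriting):
   ```python
   def test_register_duplicate_name():
       doc = {"summary": "dup", "description": "duplicate registration"}
       pc.register_scalar_function(add_const, "test_duplicate", doc,
                                   {"scalar": pa.int64()}, pa.int64())
       with pytest.raises(KeyError):
           # assumed: the registry rejects a second registration
           # under the same name instead of replacing the first
           pc.register_scalar_function(add_const, "test_duplicate", doc,
                                       {"scalar": pa.int64()}, pa.int64())
   ```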



##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,483 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+import random
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+    import pyarrow.dataset as ds
+except ImportError:
+    ds = None
+
+
[email protected](scope="session")
+def mock_udf_context():
+    # batch_length=1: nullary functions are evaluated with a batch
+    # length of 1, and the expected output here is produced by
+    # calling the udf directly, outside the call_function framework.
+    from pyarrow._compute import _get_scalar_udf_context
+    return _get_scalar_udf_context(pa.default_memory_pool(), 1)
+
+
[email protected](scope="session")
+def unary_func_fixture():
+    def unary_function(ctx, scalar1):
+        return pc.call_function("add", [scalar1, 1])
+    func_name = "y=x+k"
+    unary_doc = {"summary": "add function",
+                 "description": "test add function"}
+    pc.register_scalar_function(unary_function,
+                                func_name,
+                                unary_doc,
+                                {"array": pa.int64()},
+                                pa.int64())
+    return unary_function, func_name
+
+
[email protected](scope="session")
+def binary_func_fixture():
+    def binary_function(ctx, m, x):
+        return pc.call_function("multiply", [m, x])
+    func_name = "y=mx"
+    binary_doc = {"summary": "y=mx",
+                  "description": "find y from y = mx"}
+    pc.register_scalar_function(binary_function,
+                                func_name,
+                                binary_doc,
+                                {"m": pa.int64(),
+                                 "x": pa.int64(),
+                                 },
+                                pa.int64())
+    return binary_function, func_name
+
+
[email protected](scope="session")
+def ternary_func_fixture():
+    def ternary_function(ctx, m, x, c):
+        mx = pc.call_function("multiply", [m, x])
+        return pc.call_function("add", [mx, c])
+    ternary_doc = {"summary": "y=mx+c",
+                   "description": "find y from y = mx + c"}
+    func_name = "y=mx+c"
+    pc.register_scalar_function(ternary_function,
+                                func_name,
+                                ternary_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                },
+                                pa.int64())
+    return ternary_function, func_name
+
+
[email protected](scope="session")
+def varargs_func_fixture():
+    def varargs_function(ctx, *values):
+        base_val = values[:2]
+        res = pc.call_function("add", base_val)
+        for other_val in values[2:]:
+            res = pc.call_function("add", [res, other_val])
+        return res
+    func_name = "z=ax+by+c"
+    varargs_doc = {"summary": "z=ax+by+c",
+                   "description": "find z from z = ax + by + c"
+                   }
+    pc.register_scalar_function(varargs_function,
+                                func_name,
+                                varargs_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                    "array4": pa.int64(),
+                                    "array5": pa.int64(),
+                                },
+                                pa.int64())
+    return varargs_function, func_name
+
+
[email protected](scope="session")
+def random_with_udf_ctx_func_fixture():
+    def random_with_udf_ctx(context, one, two):
+        proxy_pool = pa.proxy_memory_pool(context.memory_pool)
+        ans = pc.add(one, two, memory_pool=proxy_pool)
+        res = pa.array([ans.as_py()], memory_pool=proxy_pool)
+        return res
+    in_types = {"one": pa.int64(),
+                "two": pa.int64(),
+                }
+    func_doc = {
+        "summary": "test udf context",
+        "description": "udf context test"
+    }
+    func_name = "test_udf_context"
+    pc.register_scalar_function(random_with_udf_ctx,
+                                func_name, func_doc,
+                                in_types,
+                                pa.int64())
+    return random_with_udf_ctx, func_name
+
+
+def const_return(ctx, scalar):
+    return 42
+
+
[email protected](scope="session")
+def output_check_func_fixture():
+    def output_check(ctx, array):
+        ar = pc.call_function("add", [array, 1])
+        ar = ar.cast(pa.int32())
+        return ar
+    func_name = "test_output_value"
+    in_types = {"array": pa.int64()}
+    out_type = pa.int64()
+    doc = {
+        "summary": "add function scalar",
+        "description": "add function"
+    }
+    pc.register_scalar_function(output_check, func_name, doc,
+                                in_types, out_type)
+    return output_check, func_name
+
+
[email protected](scope="session")
+def nullary_check_func_fixture():
+    # this needs to return array values
+    def nullary_check(ctx):
+        rand_vals = []
+        print("batch_length: ", ctx.batch_length)
+        for _ in range(ctx.batch_length):
+            random.seed(10)
+            rand_vals.append(random.randint(0, 10))

Review Comment:
   Hmm, do you realize this always appends the same value?
   ```python
   >>> random.seed(10)
   >>> random.randint(0, 10)
   9
   >>> random.seed(10)
   >>> random.randint(0, 10)
   9
   ```
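
   A minimal fix sketch: seed once, outside the loop (purely for
   reproducibility), so that each `randint` call advances the generator:
   ```python
   def nullary_check(ctx):
       random.seed(10)  # seed once, not on every iteration
       rand_vals = [random.randint(0, 10) for _ in range(ctx.batch_length)]
       return pa.array(rand_vals)
   ```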



##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,483 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+import random
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+    import pyarrow.dataset as ds
+except ImportError:
+    ds = None
+
+
[email protected](scope="session")
+def mock_udf_context():
+    # batch_length=1: nullary functions are evaluated with a batch
+    # length of 1, and the expected output here is produced by
+    # calling the udf directly, outside the call_function framework.
+    from pyarrow._compute import _get_scalar_udf_context
+    return _get_scalar_udf_context(pa.default_memory_pool(), 1)
+
+
[email protected](scope="session")
+def unary_func_fixture():
+    def unary_function(ctx, scalar1):
+        return pc.call_function("add", [scalar1, 1])
+    func_name = "y=x+k"
+    unary_doc = {"summary": "add function",
+                 "description": "test add function"}
+    pc.register_scalar_function(unary_function,
+                                func_name,
+                                unary_doc,
+                                {"array": pa.int64()},
+                                pa.int64())
+    return unary_function, func_name
+
+
[email protected](scope="session")
+def binary_func_fixture():
+    def binary_function(ctx, m, x):
+        return pc.call_function("multiply", [m, x])
+    func_name = "y=mx"
+    binary_doc = {"summary": "y=mx",
+                  "description": "find y from y = mx"}
+    pc.register_scalar_function(binary_function,
+                                func_name,
+                                binary_doc,
+                                {"m": pa.int64(),
+                                 "x": pa.int64(),
+                                 },
+                                pa.int64())
+    return binary_function, func_name
+
+
[email protected](scope="session")
+def ternary_func_fixture():
+    def ternary_function(ctx, m, x, c):
+        mx = pc.call_function("multiply", [m, x])
+        return pc.call_function("add", [mx, c])
+    ternary_doc = {"summary": "y=mx+c",
+                   "description": "find y from y = mx + c"}
+    func_name = "y=mx+c"
+    pc.register_scalar_function(ternary_function,
+                                func_name,
+                                ternary_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                },
+                                pa.int64())
+    return ternary_function, func_name
+
+
[email protected](scope="session")
+def varargs_func_fixture():
+    def varargs_function(ctx, *values):
+        base_val = values[:2]
+        res = pc.call_function("add", base_val)
+        for other_val in values[2:]:
+            res = pc.call_function("add", [res, other_val])
+        return res
+    func_name = "z=ax+by+c"
+    varargs_doc = {"summary": "z=ax+by+c",
+                   "description": "find z from z = ax + by + c"
+                   }
+    pc.register_scalar_function(varargs_function,
+                                func_name,
+                                varargs_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                    "array4": pa.int64(),
+                                    "array5": pa.int64(),
+                                },
+                                pa.int64())
+    return varargs_function, func_name
+
+
[email protected](scope="session")
+def random_with_udf_ctx_func_fixture():
+    def random_with_udf_ctx(context, one, two):
+        proxy_pool = pa.proxy_memory_pool(context.memory_pool)
+        ans = pc.add(one, two, memory_pool=proxy_pool)
+        res = pa.array([ans.as_py()], memory_pool=proxy_pool)
+        return res
+    in_types = {"one": pa.int64(),
+                "two": pa.int64(),
+                }
+    func_doc = {
+        "summary": "test udf context",
+        "description": "udf context test"
+    }
+    func_name = "test_udf_context"
+    pc.register_scalar_function(random_with_udf_ctx,
+                                func_name, func_doc,
+                                in_types,
+                                pa.int64())
+    return random_with_udf_ctx, func_name
+
+
+def const_return(ctx, scalar):
+    return 42
+
+
[email protected](scope="session")
+def output_check_func_fixture():
+    def output_check(ctx, array):
+        ar = pc.call_function("add", [array, 1])
+        ar = ar.cast(pa.int32())
+        return ar
+    func_name = "test_output_value"
+    in_types = {"array": pa.int64()}
+    out_type = pa.int64()
+    doc = {
+        "summary": "add function scalar",
+        "description": "add function"
+    }
+    pc.register_scalar_function(output_check, func_name, doc,
+                                in_types, out_type)
+    return output_check, func_name
+
+
[email protected](scope="session")
+def nullary_check_func_fixture():
+    # this needs to return array values
+    def nullary_check(ctx):
+        rand_vals = []
+        print("batch_length: ", ctx.batch_length)

Review Comment:
   I understand you probably did this for debugging, but you should remove this `print` call once you are finished.



##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,483 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+import random
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+    import pyarrow.dataset as ds
+except ImportError:
+    ds = None
+
+
[email protected](scope="session")
+def mock_udf_context():
+    # batch_length=1: nullary functions are evaluated with a batch
+    # length of 1, and the expected output here is produced by
+    # calling the udf directly, outside the call_function framework.
+    from pyarrow._compute import _get_scalar_udf_context
+    return _get_scalar_udf_context(pa.default_memory_pool(), 1)
+
+
[email protected](scope="session")
+def unary_func_fixture():
+    def unary_function(ctx, scalar1):
+        return pc.call_function("add", [scalar1, 1])
+    func_name = "y=x+k"
+    unary_doc = {"summary": "add function",
+                 "description": "test add function"}
+    pc.register_scalar_function(unary_function,
+                                func_name,
+                                unary_doc,
+                                {"array": pa.int64()},
+                                pa.int64())
+    return unary_function, func_name
+
+
[email protected](scope="session")
+def binary_func_fixture():
+    def binary_function(ctx, m, x):
+        return pc.call_function("multiply", [m, x])
+    func_name = "y=mx"
+    binary_doc = {"summary": "y=mx",
+                  "description": "find y from y = mx"}
+    pc.register_scalar_function(binary_function,
+                                func_name,
+                                binary_doc,
+                                {"m": pa.int64(),
+                                 "x": pa.int64(),
+                                 },
+                                pa.int64())
+    return binary_function, func_name
+
+
[email protected](scope="session")
+def ternary_func_fixture():
+    def ternary_function(ctx, m, x, c):
+        mx = pc.call_function("multiply", [m, x])
+        return pc.call_function("add", [mx, c])
+    ternary_doc = {"summary": "y=mx+c",
+                   "description": "find y from y = mx + c"}
+    func_name = "y=mx+c"
+    pc.register_scalar_function(ternary_function,
+                                func_name,
+                                ternary_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                },
+                                pa.int64())
+    return ternary_function, func_name
+
+
[email protected](scope="session")
+def varargs_func_fixture():
+    def varargs_function(ctx, *values):
+        base_val = values[:2]
+        res = pc.call_function("add", base_val)
+        for other_val in values[2:]:
+            res = pc.call_function("add", [res, other_val])
+        return res
+    func_name = "z=ax+by+c"
+    varargs_doc = {"summary": "z=ax+by+c",
+                   "description": "find z from z = ax + by + c"
+                   }
+    pc.register_scalar_function(varargs_function,
+                                func_name,
+                                varargs_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                    "array4": pa.int64(),
+                                    "array5": pa.int64(),
+                                },
+                                pa.int64())
+    return varargs_function, func_name
+
+
[email protected](scope="session")
+def random_with_udf_ctx_func_fixture():
+    def random_with_udf_ctx(context, one, two):
+        proxy_pool = pa.proxy_memory_pool(context.memory_pool)
+        ans = pc.add(one, two, memory_pool=proxy_pool)
+        res = pa.array([ans.as_py()], memory_pool=proxy_pool)
+        return res
+    in_types = {"one": pa.int64(),
+                "two": pa.int64(),
+                }
+    func_doc = {
+        "summary": "test udf context",
+        "description": "udf context test"
+    }
+    func_name = "test_udf_context"
+    pc.register_scalar_function(random_with_udf_ctx,
+                                func_name, func_doc,
+                                in_types,
+                                pa.int64())
+    return random_with_udf_ctx, func_name
+
+
+def const_return(ctx, scalar):
+    return 42
+
+
[email protected](scope="session")
+def output_check_func_fixture():
+    def output_check(ctx, array):
+        ar = pc.call_function("add", [array, 1])
+        ar = ar.cast(pa.int32())
+        return ar
+    func_name = "test_output_value"
+    in_types = {"array": pa.int64()}
+    out_type = pa.int64()
+    doc = {
+        "summary": "add function scalar",
+        "description": "add function"
+    }
+    pc.register_scalar_function(output_check, func_name, doc,
+                                in_types, out_type)
+    return output_check, func_name
+
+
[email protected](scope="session")
+def nullary_check_func_fixture():
+    # this needs to return array values
+    def nullary_check(ctx):
+        rand_vals = []
+        print("batch_length: ", ctx.batch_length)
+        for _ in range(ctx.batch_length):
+            random.seed(10)
+            rand_vals.append(random.randint(0, 10))
+        return pa.array(rand_vals)
+
+    func_doc = {
+        "summary": "random function",
+        "description": "generates a random value"
+    }
+    func_name = "test_random_func"
+    pc.register_scalar_function(nullary_check,
+                                func_name,
+                                func_doc,
+                                {},
+                                pa.int64())
+
+    return nullary_check, func_name
+
+
+def add_const(ctx, scalar):
+    return pc.call_function("add", [scalar, 1])
+
+
[email protected](scope="session")
+def output_type_func_fixture():
+    func_name = "test_output_type"
+    in_types = {"array": pa.int64()}
+    out_type = pa.int64()
+    doc = {
+        "summary": "add function scalar",
+        "description": "add function"
+    }
+    pc.register_scalar_function(const_return, func_name, doc,
+                                in_types, out_type)
+    return const_return, func_name
+
+
[email protected](scope="session")
+def varargs_check_func_fixture():
+    def varargs_check(ctx, *values):
+        base_val = values[:2]
+        res = pc.call_function("add", base_val)
+        for other_val in values[2:]:
+            res = pc.call_function("add", [res, other_val])
+        return res
+    func_name = "test_varargs_function"
+    in_types = {"array1": pa.int64(),
+                "array2": pa.int64(),
+                }
+    doc = {"summary": "n add function",
+           "description": "add N number of arrays"
+           }
+    pc.register_scalar_function(varargs_check, func_name, doc,
+                                in_types, pa.int64())
+
+    return varargs_check, func_name
+
+
[email protected](scope="session")
+def raise_func_fixture():
+    def raise_func(ctx):
+        raise ValueError("Test function with raise")
+    func_name = "test_raise"
+    doc = {
+        "summary": "test function with raise",
+        "description": "function with a raise"
+    }
+    pc.register_scalar_function(raise_func, func_name, doc,
+                                {}, pa.int64())
+    return raise_func, func_name
+
+
+def check_scalar_function(func_fixture,
+                          input,
+                          mock_udf_context,
+                          run_in_dataset=True):
+    function, name = func_fixture
+    expected_output = function(mock_udf_context, *input)
+    func = pc.get_function(name)
+    assert func.name == name
+
+    result = pc.call_function(name, input)
+
+    assert result == expected_output
+    if run_in_dataset:
+        field_names = [f'field{index}' for index, in_arr in enumerate(input)]
+        table = pa.Table.from_arrays(input, field_names)
+        dataset = ds.dataset(table)
+        func_args = [ds.field(field_name) for field_name in field_names]
+        result_table = dataset.to_table(
+            columns={'result': ds.field('')._call(name, func_args)})
+        assert result_table.column(0).chunks[0] == expected_output
+
+
+def test_scalar_udf_array_unary(unary_func_fixture, mock_udf_context):
+    check_scalar_function(unary_func_fixture,
+                          [
+                              pa.array([10, 20], pa.int64())
+                          ],
+                          mock_udf_context
+                          )
+
+
+def test_scalar_udf_array_binary(binary_func_fixture, mock_udf_context):
+    check_scalar_function(binary_func_fixture,
+                          [
+                              pa.array([10, 20], pa.int64()),
+                              pa.array([2, 4], pa.int64())
+                          ],
+                          mock_udf_context
+                          )
+
+
+def test_scalar_udf_array_ternary(ternary_func_fixture, mock_udf_context):
+    check_scalar_function(ternary_func_fixture,
+                          [
+                              pa.array([10, 20], pa.int64()),
+                              pa.array([2, 4], pa.int64()),
+                              pa.array([5, 10], pa.int64())
+                          ],
+                          mock_udf_context
+                          )
+
+
+def test_scalar_udf_array_varargs(varargs_func_fixture, mock_udf_context):
+    check_scalar_function(varargs_func_fixture,
+                          [
+                              pa.array([2, 3], pa.int64()),
+                              pa.array([10, 20], pa.int64()),
+                              pa.array([3, 7], pa.int64()),
+                              pa.array([20, 30], pa.int64()),
+                              pa.array([5, 10], pa.int64())
+                          ],
+                          mock_udf_context
+                          )
+
+
+def test_udf_input():

Review Comment:
   Nit, but can you give more meaningful names to test functions? For example here: `test_registration_errors`.



##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,483 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+import random
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+    import pyarrow.dataset as ds
+except ImportError:
+    ds = None
+
+
[email protected](scope="session")
+def mock_udf_context():

Review Comment:
   This doesn't need to be a fixture as it isn't initializing an expensive resource.
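
   In other words, a plain helper would do; a sketch using only names this
   module already imports:
   ```python
   def mock_udf_context(batch_length=1):
       from pyarrow._compute import _get_scalar_udf_context
       return _get_scalar_udf_context(pa.default_memory_pool(), batch_length)
   ```
   Tests would then call `mock_udf_context()` directly instead of receiving
   it through fixture injection.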



##########
python/pyarrow/tests/test_udf.py:
##########
@@ -0,0 +1,483 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import pytest
+
+import pyarrow as pa
+from pyarrow import compute as pc
+import random
+
+# UDFs are all tested with a dataset scan
+pytestmark = pytest.mark.dataset
+
+
+try:
+    import pyarrow.dataset as ds
+except ImportError:
+    ds = None
+
+
[email protected](scope="session")
+def mock_udf_context():
+    # batch_length=1: nullary functions are evaluated with a batch
+    # length of 1, and the expected output here is produced by
+    # calling the udf directly, outside the call_function framework.
+    from pyarrow._compute import _get_scalar_udf_context
+    return _get_scalar_udf_context(pa.default_memory_pool(), 1)
+
+
[email protected](scope="session")
+def unary_func_fixture():
+    def unary_function(ctx, scalar1):
+        return pc.call_function("add", [scalar1, 1])
+    func_name = "y=x+k"
+    unary_doc = {"summary": "add function",
+                 "description": "test add function"}
+    pc.register_scalar_function(unary_function,
+                                func_name,
+                                unary_doc,
+                                {"array": pa.int64()},
+                                pa.int64())
+    return unary_function, func_name
+
+
[email protected](scope="session")
+def binary_func_fixture():
+    def binary_function(ctx, m, x):
+        return pc.call_function("multiply", [m, x])
+    func_name = "y=mx"
+    binary_doc = {"summary": "y=mx",
+                  "description": "find y from y = mx"}
+    pc.register_scalar_function(binary_function,
+                                func_name,
+                                binary_doc,
+                                {"m": pa.int64(),
+                                 "x": pa.int64(),
+                                 },
+                                pa.int64())
+    return binary_function, func_name
+
+
[email protected](scope="session")
+def ternary_func_fixture():
+    def ternary_function(ctx, m, x, c):
+        mx = pc.call_function("multiply", [m, x])
+        return pc.call_function("add", [mx, c])
+    ternary_doc = {"summary": "y=mx+c",
+                   "description": "find y from y = mx + c"}
+    func_name = "y=mx+c"
+    pc.register_scalar_function(ternary_function,
+                                func_name,
+                                ternary_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                },
+                                pa.int64())
+    return ternary_function, func_name
+
+
[email protected](scope="session")
+def varargs_func_fixture():
+    def varargs_function(ctx, *values):
+        base_val = values[:2]
+        res = pc.call_function("add", base_val)
+        for other_val in values[2:]:
+            res = pc.call_function("add", [res, other_val])
+        return res
+    func_name = "z=ax+by+c"
+    varargs_doc = {"summary": "z=ax+by+c",
+                   "description": "find z from z = ax + by + c"
+                   }
+    pc.register_scalar_function(varargs_function,
+                                func_name,
+                                varargs_doc,
+                                {
+                                    "array1": pa.int64(),
+                                    "array2": pa.int64(),
+                                    "array3": pa.int64(),
+                                    "array4": pa.int64(),
+                                    "array5": pa.int64(),
+                                },
+                                pa.int64())
+    return varargs_function, func_name
+
+
[email protected](scope="session")
+def random_with_udf_ctx_func_fixture():
+    def random_with_udf_ctx(context, one, two):
+        proxy_pool = pa.proxy_memory_pool(context.memory_pool)
+        ans = pc.add(one, two, memory_pool=proxy_pool)
+        res = pa.array([ans.as_py()], memory_pool=proxy_pool)
+        return res
+    in_types = {"one": pa.int64(),
+                "two": pa.int64(),
+                }
+    func_doc = {
+        "summary": "test udf context",
+        "description": "udf context test"
+    }
+    func_name = "test_udf_context"
+    pc.register_scalar_function(random_with_udf_ctx,
+                                func_name, func_doc,
+                                in_types,
+                                pa.int64())
+    return random_with_udf_ctx, func_name
+
+
+def const_return(ctx, scalar):
+    return 42
+
+
[email protected](scope="session")
+def output_check_func_fixture():
+    def output_check(ctx, array):
+        ar = pc.call_function("add", [array, 1])
+        ar = ar.cast(pa.int32())
+        return ar
+    func_name = "test_output_value"
+    in_types = {"array": pa.int64()}
+    out_type = pa.int64()
+    doc = {
+        "summary": "add function scalar",
+        "description": "add function"
+    }
+    pc.register_scalar_function(output_check, func_name, doc,
+                                in_types, out_type)
+    return output_check, func_name
+
+
[email protected](scope="session")
+def nullary_check_func_fixture():
+    # this needs to return array values
+    def nullary_check(ctx):
+        rand_vals = []
+        print("batch_length: ", ctx.batch_length)
+        for _ in range(ctx.batch_length):
+            random.seed(10)
+            rand_vals.append(random.randint(0, 10))

Review Comment:
   Also, you can just use NumPy's random generation facilities: https://numpy.org/doc/stable/reference/random/generator.html
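
   Something along these lines, for example (a sketch: `default_rng` and
   `Generator.integers` are standard NumPy APIs, and `pa.array` accepts
   NumPy arrays):
   ```python
   import numpy as np

   def nullary_check(ctx):
       rng = np.random.default_rng(10)
       # one vectorized draw of batch_length values in [0, 10]
       return pa.array(rng.integers(0, 10, size=ctx.batch_length,
                                    endpoint=True))
   ```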



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
