lidavidm commented on code in PR #12590: URL: https://github.com/apache/arrow/pull/12590#discussion_r841881769
########## cpp/src/arrow/python/udf.cc: ########## @@ -0,0 +1,128 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include "arrow/python/udf.h" + +#include <cstddef> +#include <memory> +#include <sstream> + +#include "arrow/compute/function.h" +#include "arrow/python/common.h" + +namespace arrow { + +namespace py { + +Status VerifyArityAndInput(compute::Arity arity, const compute::ExecBatch& batch) { + bool match = static_cast<uint64_t>(arity.num_args) == batch.values.size(); + if (!match) { + return Status::Invalid("Function Arity and Input data shape doesn't match, expected ", + arity.num_args, ", got ", batch.values.size()); + } + return Status::OK(); +} + +Status ExecFunctionScalar(const compute::ExecBatch& batch, PyObject* function, + int num_args, Datum* out) { + std::shared_ptr<Scalar> c_res_data; + PyObject* arg_tuple = PyTuple_New(num_args); + for (int arg_id = 0; arg_id < num_args; arg_id++) { + if (!batch[arg_id].is_scalar()) { + return Status::Invalid("Input type and data type doesn't match"); + } + auto c_data = batch[arg_id].scalar(); + PyObject* data = wrap_scalar(c_data); + PyTuple_SetItem(arg_tuple, arg_id, data); + } + PyObject* result = PyObject_CallObject(function, arg_tuple); + if (result 
== NULL) { + return Status::ExecutionError("Output is null, but expected a scalar"); + } + if (!is_scalar(result)) { + return Status::Invalid("Output from function is not a scalar"); + } + ARROW_ASSIGN_OR_RAISE(auto unwrapped_result, unwrap_scalar(result)); + *out = unwrapped_result; + return Status::OK(); +} + +Status ExecFunctionArray(const compute::ExecBatch& batch, PyObject* function, + int num_args, Datum* out) { + std::shared_ptr<Array> c_res_data; Review Comment: This seems to be unused? ########## python/pyarrow/_compute.pyx: ########## @@ -2182,3 +2255,175 @@ cdef CExpression _bind(Expression filter, Schema schema) except *: return GetResultValue(filter.unwrap().Bind( deref(pyarrow_unwrap_schema(schema).get()))) + + +cdef CFunctionDoc _make_function_doc(func_doc): + """ + Helper function to generate the FunctionDoc + """ + cdef: + CFunctionDoc f_doc + vector[c_string] c_arg_names + c_bool c_options_required + if func_doc and isinstance(func_doc, dict): + if func_doc["summary"] and isinstance(func_doc["summary"], str): + f_doc.summary = func_doc["summary"].encode() + else: + raise ValueError("key `summary` cannot be None") + + if func_doc["description"] and isinstance(func_doc["description"], str): + f_doc.description = func_doc["description"].encode() + else: + raise ValueError("key `description` cannot be None") + + if func_doc["arg_names"] and isinstance(func_doc["arg_names"], list): + for arg_name in func_doc["arg_names"]: + if isinstance(arg_name, str): + c_arg_names.push_back(arg_name.encode()) + else: + raise ValueError( + "key `arg_names` must be a list of strings") + f_doc.arg_names = c_arg_names + else: + raise ValueError("key `arg_names` cannot be None") + + # UDFOptions integration: + # TODO: https://issues.apache.org/jira/browse/ARROW-16041 + f_doc.options_class = tobytes("None") + + c_options_required = False + f_doc.options_required = c_options_required + + return f_doc + else: + raise ValueError(f"func_doc must be a dictionary") + + +def 
register_function(func_name, num_args, function_doc, in_types, + out_type, callback): + """ + Register a user-defined-function + + Parameters + ---------- + + func_name : str + function name + num_args : int + number of arguments in the function + function_doc : dict + a dictionary object with keys + ("summary", + "description", + "arg_names" + ) + in_types : List[InputType] + list of InputType objects which defines the input + types for the function + out_type : DataType + output type of the function + callback : callable + user defined function + function includes arguments equal to the number + of input_types defined. The return type of the + function is of the type defined as output_type. + The output is a datum object which can be + an Array or a ChunkedArray or a Table or a RecordBatch. + + Example + ------- + + >>> from pyarrow import compute as pc + >>> from pyarrow.compute import register_function + >>> from pyarrow.compute import InputType + >>> + >>> func_doc = {} + >>> func_doc["summary"] = "simple udf" + >>> func_doc["description"] = "add a constant to a scalar" + >>> func_doc["arg_names"] = ["x"] + >>> + >>> def add_constant(array): + ... return pc.call_function("add", [array, 1]) + ... + >>> + >>> func_name = "py_add_func" + >>> arity = 1 + >>> in_types = [InputType.array(pa.int64())] + >>> out_type = pa.int64() + >>> register_function(func_name, arity, func_doc, + ... 
in_types, out_type, add_constant) + >>> + >>> func = pc.get_function(func_name) + >>> func.name + 'py_add_func' + >>> ans = pc.call_function(func_name, [pa.array([20])]) + >>> ans + <pyarrow.lib.Int64Array object at 0x10c22e700> + [ + 21 + ] + """ + cdef: + c_string c_func_name + CArity c_arity + CFunctionDoc c_func_doc + CInputType in_tmp + vector[CInputType] c_in_types + PyObject* c_callback + shared_ptr[CDataType] c_type + COutputType* c_out_type + CScalarUdfBuilder* c_sc_builder + CStatus st + CScalarUdfOptions* c_options + object obj + + if func_name and isinstance(func_name, str): + c_func_name = tobytes(func_name) + else: + raise ValueError("func_name should be str") + + if num_args and isinstance(num_args, int): + assert num_args > 0 + if num_args == 0: + c_arity = CArity.Nullary() + elif num_args == 1: + c_arity = CArity.Unary() + elif num_args == 2: + c_arity = CArity.Binary() + elif num_args == 3: + c_arity = CArity.Ternary() + elif num_args > 3: + c_arity = CArity.VarArgs(num_args) + else: + raise ValueError("arity must be an instance of Arity") + + c_func_doc = _make_function_doc(function_doc) + + if in_types and isinstance(in_types, list): + for in_type in in_types: + in_tmp = (<InputType> in_type).input_type + c_in_types.push_back(in_tmp) + else: + raise ValueError("input types must be of type InputType") + + if out_type: + c_type = pyarrow_unwrap_data_type(out_type) + else: + raise ValueError("Output value type must be defined") + + if callback and callable(callback): Review Comment: The intent here is that if callback is the wrong type or None, `callable` will still catch it. There's no need to separately check for None. ########## cpp/src/arrow/python/udf.cc: ########## @@ -0,0 +1,135 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership.
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include "arrow/python/udf.h" + +#include <cstddef> +#include <memory> +#include <sstream> + +#include "arrow/compute/function.h" +#include "arrow/python/common.h" + +namespace arrow { + +namespace py { + +Status exec_function_scalar(const compute::ExecBatch& batch, PyObject* function, + int num_args, Datum* out) { + std::shared_ptr<Scalar> c_res_data; Review Comment: It seems to be unused? -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: github-unsubscr...@arrow.apache.org For queries about this service, please contact Infrastructure at: us...@infra.apache.org