jorisvandenbossche commented on code in PR #34102:
URL: https://github.com/apache/arrow/pull/34102#discussion_r1116914435


##########
python/pyarrow/_acero.pyx:
##########
@@ -0,0 +1,459 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# ---------------------------------------------------------------------
+# Low-level Acero bindings
+
+# cython: profile=False
+# distutils: language = c++
+# cython: language_level = 3
+
+from pyarrow.includes.common cimport *
+from pyarrow.includes.libarrow cimport *
+from pyarrow.includes.libarrow_dataset cimport *
+from pyarrow.lib cimport (Table, check_status, pyarrow_unwrap_table, pyarrow_wrap_table,
+                          RecordBatchReader)
+from pyarrow.lib import frombytes, tobytes
+from pyarrow._compute cimport Expression, FunctionOptions, _ensure_field_ref, _true
+from pyarrow.compute import field
+
+# Initialize()  # Initialise support for Datasets in ExecPlan
+
+
+cdef class ExecNodeOptions(_Weakrefable):
+    __slots__ = ()  # avoid mistakenly creating attributes
+
+    cdef const CExecNodeOptions* get_options(self) except NULL:
+        return self.wrapped.get()
+
+    cdef void init(self, const shared_ptr[CExecNodeOptions]& sp):
+        self.wrapped = sp
+
+    cdef inline shared_ptr[CExecNodeOptions] unwrap(self):
+        return self.wrapped
+
+    # def __repr__(self):
+    #     type_name = self.__class__.__name__
+    #     # Remove {} so we can use our own braces
+    #     string_repr = frombytes(self.get_options().ToString())[1:-1]
+    #     return f"{type_name}({string_repr})"
+
+    # def __eq__(self, FunctionOptions other):
+    #     return self.get_options().Equals(deref(other.get_options()))
+
+
+cdef class _TableSourceNodeOptions(ExecNodeOptions):
+
+    def _set_options(self, Table table):
+        cdef:
+            shared_ptr[CTable] c_table
+
+        c_table = pyarrow_unwrap_table(table)
+        self.wrapped.reset(
+            new CTableSourceNodeOptions(c_table)
+        )
+
+
+class TableSourceNodeOptions(_TableSourceNodeOptions):
+    """
+    A Source node which accepts a table.
+
+    Parameters
+    ----------
+    table : pyarrow.Table
+        The table which acts as the data source.
+    """
+
+    def __init__(self, Table table):
+        self._set_options(table)
+
+
+cdef class _FilterNodeOptions(ExecNodeOptions):
+
+    def _set_options(self, Expression filter_expression not None):
+        self.wrapped.reset(
+            new CFilterNodeOptions(<CExpression>filter_expression.unwrap())
+        )
+
+
+class FilterNodeOptions(_FilterNodeOptions):
+    """
+    Make a node which excludes some rows from batches passed through it.
+
+    The "filter" operation provides an option to define data filtering
+    criteria. It selects rows matching a given expression. Filters can
+    be written using pyarrow.compute.Expression.
+
+    Parameters
+    ----------
+    filter_expression : pyarrow.compute.Expression
+        The expression used to filter rows; only rows for which it
+        evaluates to true are kept.
+    """
+
+    def __init__(self, Expression filter_expression):
+        self._set_options(filter_expression)
+
+
+cdef class _ProjectNodeOptions(ExecNodeOptions):
+
+    def _set_options(self, expressions, names=None):
+        cdef:
+            Expression expr
+            vector[CExpression] c_expressions
+            vector[c_string] c_names
+
+        for expr in expressions:
+            c_expressions.push_back(expr.unwrap())
+
+        if names is not None:
+            if len(names) != len(expressions):
+                raise ValueError("len(names) must match len(expressions)")
+
+            for name in names:
+                c_names.push_back(<c_string>tobytes(name))
+
+            self.wrapped.reset(
+                new CProjectNodeOptions(c_expressions, c_names)
+            )
+        else:
+            self.wrapped.reset(
+                new CProjectNodeOptions(c_expressions)
+            )
+
+
+class ProjectNodeOptions(_ProjectNodeOptions):
+    """
+    Make a node which executes expressions on input batches,
+    producing new batches.
+
+    The "project" operation rearranges, deletes, transforms, and
+    creates columns. Each output column is computed by evaluating
+    an expression against the source record batch.
+
+    Parameters
+    ----------
+    expressions : list of pyarrow.compute.Expression
+        List of expressions to evaluate against the source batch.

Review Comment:
   > These must be scalar expressions
   
   Are those criteria already documented or described somewhere? We have the
   section about "scalar" compute functions
   (https://arrow.apache.org/docs/dev/cpp/compute.html#element-wise-scalar-functions),
   but that is also not that explicit.
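
   For illustration, a minimal sketch of how the nodes in this hunk could be chained,
   and what a "scalar" (element-wise) expression looks like in a projection. This
   assumes an eventual public `pyarrow.acero` module with a `Declaration` wrapper,
   which is not part of this hunk:

   ```python
   import pyarrow as pa
   import pyarrow.compute as pc
   from pyarrow import acero  # assumed public wrapper around _acero (not in this hunk)

   table = pa.table({"a": [1, 2, 3], "b": ["x", "y", "z"]})

   # "Scalar" expressions are element-wise: each output value is computed from
   # the corresponding input row only (arithmetic, casts, string kernels, ...).
   result = acero.Declaration.from_sequence([
       acero.Declaration("table_source", acero.TableSourceNodeOptions(table)),
       acero.Declaration("filter", acero.FilterNodeOptions(pc.field("a") > 1)),
       acero.Declaration("project", acero.ProjectNodeOptions(
           [pc.multiply(pc.field("a"), 2), pc.field("b")],
           ["a_doubled", "b"],
       )),
   ]).to_table()

   # An aggregation such as a column-wide sum is not a scalar expression and
   # would belong in a dedicated aggregate node rather than a projection.
   ```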



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
