This is an automated email from the ASF dual-hosted git repository.

hongze pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git


The following commit(s) were added to refs/heads/main by this push:
     new 7d793868d1 [VL] Add VeloxTransitionSuite (#7324)
7d793868d1 is described below

commit 7d793868d16ebf46bb3199494418a3faf4cefe41
Author: Hongze Zhang <[email protected]>
AuthorDate: Tue Sep 24 13:34:11 2024 +0800

    [VL] Add VeloxTransitionSuite (#7324)
---
 .../gluten/backendsapi/clickhouse/CHBackend.scala  |   1 +
 .../package.scala => columnarbatch/CHBatch.scala}  |  59 ++++----
 .../gluten/backendsapi/velox/VeloxBackend.scala    |   1 +
 .../VeloxBatch.scala}                              |  33 ++---
 ...xBackendTestBase.java => MockVeloxBackend.java} |  20 +--
 .../apache/gluten/test/VeloxBackendTestBase.java   |  57 +-------
 .../columnar/transition/VeloxTransitionSuite.scala | 160 +++++++++++++++++++++
 dev/vcpkg/toolchain.cmake                          |   2 +-
 .../columnar/transition/TransitionSuite.scala      |  56 +-------
 .../columnar/transition/TransitionSuiteBase.scala  |  95 ++++++++++++
 10 files changed, 305 insertions(+), 179 deletions(-)

diff --git 
a/backends-clickhouse/src/main/scala/org/apache/gluten/backendsapi/clickhouse/CHBackend.scala
 
b/backends-clickhouse/src/main/scala/org/apache/gluten/backendsapi/clickhouse/CHBackend.scala
index af668b15fe..ff0cd39109 100644
--- 
a/backends-clickhouse/src/main/scala/org/apache/gluten/backendsapi/clickhouse/CHBackend.scala
+++ 
b/backends-clickhouse/src/main/scala/org/apache/gluten/backendsapi/clickhouse/CHBackend.scala
@@ -20,6 +20,7 @@ import org.apache.gluten.GlutenBuildInfo._
 import org.apache.gluten.GlutenConfig
 import org.apache.gluten.backend.Backend
 import org.apache.gluten.backendsapi._
+import org.apache.gluten.columnarbatch.CHBatch
 import org.apache.gluten.execution.WriteFilesExecTransformer
 import org.apache.gluten.expression.WindowFunctionsBuilder
 import org.apache.gluten.extension.ValidationResult
diff --git 
a/backends-clickhouse/src/main/scala/org/apache/gluten/backendsapi/clickhouse/package.scala
 
b/backends-clickhouse/src/main/scala/org/apache/gluten/columnarbatch/CHBatch.scala
similarity index 52%
rename from 
backends-clickhouse/src/main/scala/org/apache/gluten/backendsapi/clickhouse/package.scala
rename to 
backends-clickhouse/src/main/scala/org/apache/gluten/columnarbatch/CHBatch.scala
index 8975fb315f..0121d01578 100644
--- 
a/backends-clickhouse/src/main/scala/org/apache/gluten/backendsapi/clickhouse/package.scala
+++ 
b/backends-clickhouse/src/main/scala/org/apache/gluten/columnarbatch/CHBatch.scala
@@ -14,41 +14,38 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.gluten.backendsapi
+package org.apache.gluten.columnarbatch
 
 import org.apache.gluten.extension.columnar.transition.Convention
 
 import org.apache.spark.sql.execution.{CHColumnarToRowExec, 
RowToCHNativeColumnarExec, SparkPlan}
 
-package object clickhouse {
-
-  /**
-   * ClickHouse batch convention.
-   *
-   * [[fromRow]] and [[toRow]] need a [[TransitionDef]] instance. The scala 
allows an compact way to
-   * implement trait using a lambda function.
-   *
-   * Here the detail definition is given in [[CHBatch.fromRow]].
-   * {{{
-   *       fromRow(new TransitionDef {
-   *       override def create(): Transition = new Transition {
-   *         override protected def apply0(plan: SparkPlan): SparkPlan =
-   *           RowToCHNativeColumnarExec(plan)
-   *       }
-   *     })
-   * }}}
-   */
-  case object CHBatch extends Convention.BatchType {
-    fromRow(
-      () =>
-        (plan: SparkPlan) => {
-          RowToCHNativeColumnarExec(plan)
-        })
+/**
+ * ClickHouse batch convention.
+ *
+ * [[fromRow]] and [[toRow]] need a [[TransitionDef]] instance. Scala allows a compact way to
+ * implement a trait using a lambda function.
+ *
+ * Here the detailed definition is given in [[CHBatch.fromRow]].
+ * {{{
+ *       fromRow(new TransitionDef {
+ *       override def create(): Transition = new Transition {
+ *         override protected def apply0(plan: SparkPlan): SparkPlan =
+ *           RowToCHNativeColumnarExec(plan)
+ *       }
+ *     })
+ * }}}
+ */
+object CHBatch extends Convention.BatchType {
+  fromRow(
+    () =>
+      (plan: SparkPlan) => {
+        RowToCHNativeColumnarExec(plan)
+      })
 
-    toRow(
-      () =>
-        (plan: SparkPlan) => {
-          CHColumnarToRowExec(plan)
-        })
-  }
+  toRow(
+    () =>
+      (plan: SparkPlan) => {
+        CHColumnarToRowExec(plan)
+      })
 }
diff --git 
a/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxBackend.scala
 
b/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxBackend.scala
index a21a0dda37..027876a2b0 100644
--- 
a/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxBackend.scala
+++ 
b/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxBackend.scala
@@ -20,6 +20,7 @@ import org.apache.gluten.GlutenBuildInfo._
 import org.apache.gluten.GlutenConfig
 import org.apache.gluten.backend.Backend
 import org.apache.gluten.backendsapi._
+import org.apache.gluten.columnarbatch.VeloxBatch
 import org.apache.gluten.exception.GlutenNotSupportException
 import org.apache.gluten.execution.WriteFilesExecTransformer
 import org.apache.gluten.expression.WindowFunctionsBuilder
diff --git 
a/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/package.scala
 
b/backends-velox/src/main/scala/org/apache/gluten/columnarbatch/VeloxBatch.scala
similarity index 63%
rename from 
backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/package.scala
rename to 
backends-velox/src/main/scala/org/apache/gluten/columnarbatch/VeloxBatch.scala
index 8ab68b7fe0..aa6676dc9b 100644
--- 
a/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/package.scala
+++ 
b/backends-velox/src/main/scala/org/apache/gluten/columnarbatch/VeloxBatch.scala
@@ -14,30 +14,27 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.gluten.backendsapi
+package org.apache.gluten.columnarbatch
 
-import org.apache.gluten.columnarbatch.ArrowBatch
 import org.apache.gluten.execution.{RowToVeloxColumnarExec, 
VeloxColumnarToRowExec}
 import org.apache.gluten.extension.columnar.transition.{Convention, 
TransitionDef}
 
 import org.apache.spark.sql.execution.SparkPlan
 
-package object velox {
-  case object VeloxBatch extends Convention.BatchType {
-    fromRow(
-      () =>
-        (plan: SparkPlan) => {
-          RowToVeloxColumnarExec(plan)
-        })
+object VeloxBatch extends Convention.BatchType {
+  fromRow(
+    () =>
+      (plan: SparkPlan) => {
+        RowToVeloxColumnarExec(plan)
+      })
 
-    toRow(
-      () =>
-        (plan: SparkPlan) => {
-          VeloxColumnarToRowExec(plan)
-        })
+  toRow(
+    () =>
+      (plan: SparkPlan) => {
+        VeloxColumnarToRowExec(plan)
+      })
 
-    // Velox batch is considered one-way compatible with Arrow batch.
-    // This is practically achieved by utilizing C++ API 
VeloxColumnarBatch::from at runtime.
-    fromBatch(ArrowBatch, TransitionDef.empty)
-  }
+  // Velox batch is considered one-way compatible with Arrow batch.
+  // This is practically achieved by utilizing C++ API 
VeloxColumnarBatch::from at runtime.
+  fromBatch(ArrowBatch, TransitionDef.empty)
 }
diff --git 
a/backends-velox/src/test/java/org/apache/gluten/test/VeloxBackendTestBase.java 
b/backends-velox/src/test/java/org/apache/gluten/test/MockVeloxBackend.java
similarity index 81%
copy from 
backends-velox/src/test/java/org/apache/gluten/test/VeloxBackendTestBase.java
copy to 
backends-velox/src/test/java/org/apache/gluten/test/MockVeloxBackend.java
index 2117311599..b5ce30e39b 100644
--- 
a/backends-velox/src/test/java/org/apache/gluten/test/VeloxBackendTestBase.java
+++ b/backends-velox/src/test/java/org/apache/gluten/test/MockVeloxBackend.java
@@ -17,34 +17,18 @@
 package org.apache.gluten.test;
 
 import org.apache.gluten.GlutenConfig;
-import org.apache.gluten.backendsapi.ListenerApi;
-import org.apache.gluten.backendsapi.velox.VeloxListenerApi;
 
 import com.codahale.metrics.MetricRegistry;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.plugin.PluginContext;
 import org.apache.spark.resource.ResourceInformation;
 import org.jetbrains.annotations.NotNull;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
 
 import java.io.IOException;
 import java.util.Map;
 
-public abstract class VeloxBackendTestBase {
-  private static final ListenerApi API = new VeloxListenerApi();
-
-  @BeforeClass
-  public static void setup() {
-    API.onExecutorStart(mockPluginContext());
-  }
-
-  @AfterClass
-  public static void tearDown() {
-    API.onExecutorShutdown();
-  }
-
-  private static PluginContext mockPluginContext() {
+public final class MockVeloxBackend {
+  public static PluginContext mockPluginContext() {
     return new PluginContext() {
       @Override
       public MetricRegistry metricRegistry() {
diff --git 
a/backends-velox/src/test/java/org/apache/gluten/test/VeloxBackendTestBase.java 
b/backends-velox/src/test/java/org/apache/gluten/test/VeloxBackendTestBase.java
index 2117311599..2759613793 100644
--- 
a/backends-velox/src/test/java/org/apache/gluten/test/VeloxBackendTestBase.java
+++ 
b/backends-velox/src/test/java/org/apache/gluten/test/VeloxBackendTestBase.java
@@ -16,77 +16,22 @@
  */
 package org.apache.gluten.test;
 
-import org.apache.gluten.GlutenConfig;
 import org.apache.gluten.backendsapi.ListenerApi;
 import org.apache.gluten.backendsapi.velox.VeloxListenerApi;
 
-import com.codahale.metrics.MetricRegistry;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.plugin.PluginContext;
-import org.apache.spark.resource.ResourceInformation;
-import org.jetbrains.annotations.NotNull;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
-import java.io.IOException;
-import java.util.Map;
-
 public abstract class VeloxBackendTestBase {
   private static final ListenerApi API = new VeloxListenerApi();
 
   @BeforeClass
   public static void setup() {
-    API.onExecutorStart(mockPluginContext());
+    API.onExecutorStart(MockVeloxBackend.mockPluginContext());
   }
 
   @AfterClass
   public static void tearDown() {
     API.onExecutorShutdown();
   }
-
-  private static PluginContext mockPluginContext() {
-    return new PluginContext() {
-      @Override
-      public MetricRegistry metricRegistry() {
-        throw new UnsupportedOperationException();
-      }
-
-      @Override
-      public SparkConf conf() {
-        return newSparkConf();
-      }
-
-      @Override
-      public String executorID() {
-        throw new UnsupportedOperationException();
-      }
-
-      @Override
-      public String hostname() {
-        throw new UnsupportedOperationException();
-      }
-
-      @Override
-      public Map<String, ResourceInformation> resources() {
-        throw new UnsupportedOperationException();
-      }
-
-      @Override
-      public void send(Object message) throws IOException {
-        throw new UnsupportedOperationException();
-      }
-
-      @Override
-      public Object ask(Object message) throws Exception {
-        throw new UnsupportedOperationException();
-      }
-    };
-  }
-
-  @NotNull
-  private static SparkConf newSparkConf() {
-    final SparkConf conf = new SparkConf();
-    conf.set(GlutenConfig.SPARK_OFFHEAP_SIZE_KEY(), "1g");
-    return conf;
-  }
 }
diff --git 
a/backends-velox/src/test/scala/org/apache/gluten/extension/columnar/transition/VeloxTransitionSuite.scala
 
b/backends-velox/src/test/scala/org/apache/gluten/extension/columnar/transition/VeloxTransitionSuite.scala
new file mode 100644
index 0000000000..8decedc411
--- /dev/null
+++ 
b/backends-velox/src/test/scala/org/apache/gluten/extension/columnar/transition/VeloxTransitionSuite.scala
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.gluten.extension.columnar.transition
+
+import org.apache.gluten.backendsapi.velox.VeloxListenerApi
+import org.apache.gluten.columnarbatch.{ArrowBatch, VeloxBatch}
+import org.apache.gluten.exception.GlutenException
+import org.apache.gluten.execution.{RowToVeloxColumnarExec, 
VeloxColumnarToRowExec}
+import 
org.apache.gluten.extension.columnar.transition.Convention.BatchType.VanillaBatch
+import org.apache.gluten.test.MockVeloxBackend
+
+import org.apache.spark.sql.execution.{ColumnarToRowExec, RowToColumnarExec}
+import org.apache.spark.sql.test.SharedSparkSession
+
+class VeloxTransitionSuite extends SharedSparkSession {
+  import VeloxTransitionSuite._
+
+  private val api = new VeloxListenerApi()
+
+  test("Vanilla C2R - outputs row") {
+    val in = BatchLeaf(VanillaBatch)
+    val out = Transitions.insertTransitions(in, outputsColumnar = false)
+    assert(out == ColumnarToRowExec(BatchLeaf(VanillaBatch)))
+  }
+
+  test("Vanilla C2R - requires row input") {
+    val in = RowUnary(BatchLeaf(VanillaBatch))
+    val out = Transitions.insertTransitions(in, outputsColumnar = false)
+    assert(out == RowUnary(ColumnarToRowExec(BatchLeaf(VanillaBatch))))
+  }
+
+  test("Vanilla R2C - requires vanilla input") {
+    val in = BatchUnary(VanillaBatch, RowLeaf())
+    val out = Transitions.insertTransitions(in, outputsColumnar = false)
+    assert(out == ColumnarToRowExec(BatchUnary(VanillaBatch, 
RowToColumnarExec(RowLeaf()))))
+  }
+
+  test("Arrow C2R - outputs row") {
+    val in = BatchLeaf(ArrowBatch)
+    val out = Transitions.insertTransitions(in, outputsColumnar = false)
+    assert(out == ColumnarToRowExec(BatchLeaf(ArrowBatch)))
+  }
+
+  test("Arrow C2R - requires row input") {
+    val in = RowUnary(BatchLeaf(ArrowBatch))
+    val out = Transitions.insertTransitions(in, outputsColumnar = false)
+    assert(out == RowUnary(ColumnarToRowExec(BatchLeaf(ArrowBatch))))
+  }
+
+  test("Arrow R2C - requires Arrow input") {
+    val in = BatchUnary(ArrowBatch, RowLeaf())
+    assertThrows[GlutenException] {
+      // No viable transitions.
+      // FIXME: Support this case.
+      Transitions.insertTransitions(in, outputsColumnar = false)
+    }
+  }
+
+  test("Velox C2R - outputs row") {
+    val in = BatchLeaf(VeloxBatch)
+    val out = Transitions.insertTransitions(in, outputsColumnar = false)
+    assert(out == VeloxColumnarToRowExec(BatchLeaf(VeloxBatch)))
+  }
+
+  test("Velox C2R - requires row input") {
+    val in = RowUnary(BatchLeaf(VeloxBatch))
+    val out = Transitions.insertTransitions(in, outputsColumnar = false)
+    assert(out == RowUnary(VeloxColumnarToRowExec(BatchLeaf(VeloxBatch))))
+  }
+
+  test("Velox R2C - outputs Velox") {
+    val in = RowLeaf()
+    val out = Transitions.insertTransitions(in, outputsColumnar = true)
+    assert(out == RowToVeloxColumnarExec(RowLeaf()))
+  }
+
+  test("Velox R2C - requires Velox input") {
+    val in = BatchUnary(VeloxBatch, RowLeaf())
+    val out = Transitions.insertTransitions(in, outputsColumnar = false)
+    assert(out == VeloxColumnarToRowExec(BatchUnary(VeloxBatch, 
RowToVeloxColumnarExec(RowLeaf()))))
+  }
+
+  test("Arrow-to-Velox C2C") {
+    val in = BatchUnary(VeloxBatch, BatchLeaf(ArrowBatch))
+    val out = Transitions.insertTransitions(in, outputsColumnar = false)
+    // No explicit transition needed for Arrow-to-Velox.
+    // FIXME: Add explicit transitions.
+    //  See https://github.com/apache/incubator-gluten/issues/7313.
+    assert(out == VeloxColumnarToRowExec(BatchUnary(VeloxBatch, 
BatchLeaf(ArrowBatch))))
+  }
+
+  test("Velox-to-Arrow C2C") {
+    val in = BatchUnary(ArrowBatch, BatchLeaf(VeloxBatch))
+    assertThrows[GlutenException] {
+      // No viable transitions.
+      // FIXME: Support this case.
+      Transitions.insertTransitions(in, outputsColumnar = false)
+    }
+  }
+
+  test("Vanilla-to-Velox C2C") {
+    val in = BatchUnary(VeloxBatch, BatchLeaf(VanillaBatch))
+    val out = Transitions.insertTransitions(in, outputsColumnar = false)
+    assert(
+      out == VeloxColumnarToRowExec(
+        BatchUnary(VeloxBatch, 
RowToVeloxColumnarExec(ColumnarToRowExec(BatchLeaf(VanillaBatch))))))
+  }
+
+  test("Velox-to-Vanilla C2C") {
+    val in = BatchUnary(VanillaBatch, BatchLeaf(VeloxBatch))
+    val out = Transitions.insertTransitions(in, outputsColumnar = false)
+    assert(
+      out == ColumnarToRowExec(
+        BatchUnary(VanillaBatch, 
RowToColumnarExec(VeloxColumnarToRowExec(BatchLeaf(VeloxBatch))))))
+  }
+
+  test("Vanilla-to-Arrow C2C") {
+    val in = BatchUnary(ArrowBatch, BatchLeaf(VanillaBatch))
+    assertThrows[GlutenException] {
+      // No viable transitions.
+      // FIXME: Support this case.
+      Transitions.insertTransitions(in, outputsColumnar = false)
+    }
+  }
+
+  test("Arrow-to-Vanilla C2C") {
+    val in = BatchUnary(VanillaBatch, BatchLeaf(ArrowBatch))
+    val out = Transitions.insertTransitions(in, outputsColumnar = false)
+    // No explicit transition needed for Arrow-to-Vanilla.
+    // FIXME: Add explicit transitions.
+    //  See https://github.com/apache/incubator-gluten/issues/7313.
+    assert(out == ColumnarToRowExec(BatchUnary(VanillaBatch, 
BatchLeaf(ArrowBatch))))
+  }
+
+  override protected def beforeAll(): Unit = {
+    api.onExecutorStart(MockVeloxBackend.mockPluginContext())
+    super.beforeAll()
+  }
+
+  override protected def afterAll(): Unit = {
+    super.afterAll()
+    api.onExecutorShutdown()
+  }
+}
+
+object VeloxTransitionSuite extends TransitionSuiteBase {}
diff --git a/dev/vcpkg/toolchain.cmake b/dev/vcpkg/toolchain.cmake
index 10d156c1b0..7ab616e275 100644
--- a/dev/vcpkg/toolchain.cmake
+++ b/dev/vcpkg/toolchain.cmake
@@ -3,7 +3,7 @@
 
 set(ENABLE_GLUTEN_VCPKG ON)
 
-# Force the use of VCPKG classic mode to avoid reinstalling vcpkg features 
during
+# Force the use of VCPKG classic mode to avoid reinstalling vcpkg features 
during building
 # different CMake sub-projects. Which means, the features installed by `vcpkg 
install`
 # in script `init.sh` will be used across all CMake sub-projects.
 #
diff --git 
a/gluten-substrait/src/test/scala/org/apache/gluten/extension/columnar/transition/TransitionSuite.scala
 
b/gluten-substrait/src/test/scala/org/apache/gluten/extension/columnar/transition/TransitionSuite.scala
index 5c6d692ae2..1972592993 100644
--- 
a/gluten-substrait/src/test/scala/org/apache/gluten/extension/columnar/transition/TransitionSuite.scala
+++ 
b/gluten-substrait/src/test/scala/org/apache/gluten/extension/columnar/transition/TransitionSuite.scala
@@ -83,7 +83,7 @@ class TransitionSuite extends SharedSparkSession {
   }
 }
 
-object TransitionSuite {
+object TransitionSuite extends TransitionSuiteBase {
   object TypeA extends Convention.BatchType {
     fromRow(
       () =>
@@ -177,58 +177,4 @@ object TransitionSuite {
     override def output: Seq[Attribute] = child.output
   }
 
-  case class BatchLeaf(override val batchType0: Convention.BatchType)
-    extends LeafExecNode
-    with GlutenPlan {
-    override def supportsColumnar: Boolean = true
-    override protected def doExecute(): RDD[InternalRow] = throw new 
UnsupportedOperationException()
-    override def output: Seq[Attribute] = List.empty
-  }
-
-  case class BatchUnary(
-      override val batchType0: Convention.BatchType,
-      override val child: SparkPlan)
-    extends UnaryExecNode
-    with GlutenPlan {
-    override def supportsColumnar: Boolean = true
-    override protected def withNewChildInternal(newChild: SparkPlan): 
SparkPlan =
-      copy(child = newChild)
-    override protected def doExecute(): RDD[InternalRow] = throw new 
UnsupportedOperationException()
-    override def output: Seq[Attribute] = child.output
-  }
-
-  case class BatchBinary(
-      override val batchType0: Convention.BatchType,
-      override val left: SparkPlan,
-      override val right: SparkPlan)
-    extends BinaryExecNode
-    with GlutenPlan {
-    override def supportsColumnar: Boolean = true
-    override protected def withNewChildrenInternal(
-        newLeft: SparkPlan,
-        newRight: SparkPlan): SparkPlan = copy(left = newLeft, right = 
newRight)
-    override protected def doExecute(): RDD[InternalRow] = throw new 
UnsupportedOperationException()
-    override def output: Seq[Attribute] = left.output ++ right.output
-  }
-
-  case class RowLeaf() extends LeafExecNode {
-    override protected def doExecute(): RDD[InternalRow] = throw new 
UnsupportedOperationException()
-    override def output: Seq[Attribute] = List.empty
-  }
-
-  case class RowUnary(override val child: SparkPlan) extends UnaryExecNode {
-    override protected def withNewChildInternal(newChild: SparkPlan): 
SparkPlan =
-      copy(child = newChild)
-    override protected def doExecute(): RDD[InternalRow] = throw new 
UnsupportedOperationException()
-    override def output: Seq[Attribute] = child.output
-  }
-
-  case class RowBinary(override val left: SparkPlan, override val right: 
SparkPlan)
-    extends BinaryExecNode {
-    override protected def withNewChildrenInternal(
-        newLeft: SparkPlan,
-        newRight: SparkPlan): SparkPlan = copy(left = newLeft, right = 
newRight)
-    override protected def doExecute(): RDD[InternalRow] = throw new 
UnsupportedOperationException()
-    override def output: Seq[Attribute] = left.output ++ right.output
-  }
 }
diff --git 
a/gluten-substrait/src/test/scala/org/apache/gluten/extension/columnar/transition/TransitionSuiteBase.scala
 
b/gluten-substrait/src/test/scala/org/apache/gluten/extension/columnar/transition/TransitionSuiteBase.scala
new file mode 100644
index 0000000000..d82cc3aac9
--- /dev/null
+++ 
b/gluten-substrait/src/test/scala/org/apache/gluten/extension/columnar/transition/TransitionSuiteBase.scala
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.gluten.extension.columnar.transition
+
+import org.apache.gluten.extension.GlutenPlan
+
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.execution.{BinaryExecNode, LeafExecNode, 
SparkPlan, UnaryExecNode}
+
+trait TransitionSuiteBase {
+  case class BatchLeaf(override val batchType0: Convention.BatchType)
+    extends LeafExecNode
+    with GlutenPlan {
+    override def supportsColumnar: Boolean = true
+
+    override protected def doExecute(): RDD[InternalRow] = throw new 
UnsupportedOperationException()
+
+    override def output: Seq[Attribute] = List.empty
+  }
+
+  case class BatchUnary(
+      override val batchType0: Convention.BatchType,
+      override val child: SparkPlan)
+    extends UnaryExecNode
+    with GlutenPlan {
+    override def supportsColumnar: Boolean = true
+
+    override protected def withNewChildInternal(newChild: SparkPlan): 
SparkPlan =
+      copy(child = newChild)
+
+    override protected def doExecute(): RDD[InternalRow] = throw new 
UnsupportedOperationException()
+
+    override def output: Seq[Attribute] = child.output
+  }
+
+  case class BatchBinary(
+      override val batchType0: Convention.BatchType,
+      override val left: SparkPlan,
+      override val right: SparkPlan)
+    extends BinaryExecNode
+    with GlutenPlan {
+    override def supportsColumnar: Boolean = true
+
+    override protected def withNewChildrenInternal(
+        newLeft: SparkPlan,
+        newRight: SparkPlan): SparkPlan = copy(left = newLeft, right = 
newRight)
+
+    override protected def doExecute(): RDD[InternalRow] = throw new 
UnsupportedOperationException()
+
+    override def output: Seq[Attribute] = left.output ++ right.output
+  }
+
+  case class RowLeaf() extends LeafExecNode {
+    override protected def doExecute(): RDD[InternalRow] = throw new 
UnsupportedOperationException()
+
+    override def output: Seq[Attribute] = List.empty
+  }
+
+  case class RowUnary(override val child: SparkPlan) extends UnaryExecNode {
+    override protected def withNewChildInternal(newChild: SparkPlan): 
SparkPlan =
+      copy(child = newChild)
+
+    override protected def doExecute(): RDD[InternalRow] = throw new 
UnsupportedOperationException()
+
+    override def output: Seq[Attribute] = child.output
+  }
+
+  case class RowBinary(override val left: SparkPlan, override val right: 
SparkPlan)
+    extends BinaryExecNode {
+    override protected def withNewChildrenInternal(
+        newLeft: SparkPlan,
+        newRight: SparkPlan): SparkPlan = copy(left = newLeft, right = 
newRight)
+
+    override protected def doExecute(): RDD[InternalRow] = throw new 
UnsupportedOperationException()
+
+    override def output: Seq[Attribute] = left.output ++ right.output
+  }
+
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to