This is an automated email from the ASF dual-hosted git repository.
hongze pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git
The following commit(s) were added to refs/heads/main by this push:
new efb81e284 [VL] Gluten-it: Improve test report table rendering (#5889)
efb81e284 is described below
commit efb81e2847da13cf51f5df11cea41b083b8f0475
Author: Hongze Zhang <[email protected]>
AuthorDate: Wed May 29 16:39:35 2024 +0800
[VL] Gluten-it: Improve test report table rendering (#5889)
---
.../gluten/integration/command/SparkRunModes.java | 3 +
.../gluten/integration/action/Parameterized.scala | 28 +-
.../apache/gluten/integration/action/Queries.scala | 23 +-
.../gluten/integration/action/QueriesCompare.scala | 50 ++--
.../gluten/integration/action/TableFormatter.scala | 78 ------
.../gluten/integration/action/TableRender.scala | 308 +++++++++++++++++++++
.../integration/action/TableRenderTest.scala | 91 ++++++
7 files changed, 454 insertions(+), 127 deletions(-)
diff --git
a/tools/gluten-it/common/src/main/java/org/apache/gluten/integration/command/SparkRunModes.java
b/tools/gluten-it/common/src/main/java/org/apache/gluten/integration/command/SparkRunModes.java
index f5a5c73a6..cfd3848d8 100644
---
a/tools/gluten-it/common/src/main/java/org/apache/gluten/integration/command/SparkRunModes.java
+++
b/tools/gluten-it/common/src/main/java/org/apache/gluten/integration/command/SparkRunModes.java
@@ -140,6 +140,9 @@ public final class SparkRunModes {
Optional<String> extraClassPath =
Arrays.stream(classPathValues).filter(classPath -> {
File file = new File(classPath);
return file.exists() && file.isFile() &&
extraJarSet.contains(file.getName());
+ }).map(classPath -> {
+ File file = new File(classPath);
+ return file.getAbsolutePath();
}).reduce((s1, s2) -> s1 + File.pathSeparator + s2);
final Map<String, String> extras = new HashMap<>();
diff --git
a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Parameterized.scala
b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Parameterized.scala
index 2871ef2de..799b7632e 100644
---
a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Parameterized.scala
+++
b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Parameterized.scala
@@ -18,13 +18,14 @@ package org.apache.gluten.integration.action
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.gluten.integration.action.Actions.QuerySelector
+import org.apache.gluten.integration.action.TableRender.RowParser.FieldAppender.RowAppender
import org.apache.gluten.integration.stat.RamStat
import org.apache.gluten.integration.{QueryRunner, Suite, TableCreator}
import org.apache.spark.sql.ConfUtils.ConfImplicits._
import org.apache.spark.sql.SparkSessionSwitcher
import scala.collection.mutable
-import scala.collection.mutable.{ArrayBuffer, ListBuffer}
+import scala.collection.mutable.ListBuffer
class Parameterized(
scale: Double,
@@ -198,24 +199,25 @@ case class TestResultLine(
object TestResultLine {
class Parser(dimNames: Seq[String], metricNames: Seq[String])
- extends TableFormatter.RowParser[TestResultLine] {
- override def parse(line: TestResultLine): Seq[Any] = {
- val values = ArrayBuffer[Any](line.queryId, line.succeed)
+ extends TableRender.RowParser[TestResultLine] {
+ override def parse(rowAppender: RowAppender, line: TestResultLine): Unit =
{
+ val inc = rowAppender.incremental()
+ inc.next().write(line.queryId)
+ inc.next().write(line.succeed)
dimNames.foreach { dimName =>
val coordinate = line.coordinate.coordinate
if (!coordinate.contains(dimName)) {
throw new IllegalStateException("Dimension name not found" + dimName)
}
- values.append(coordinate(dimName))
+ inc.next().write(coordinate(dimName))
}
metricNames.foreach { metricName =>
val metrics = line.metrics
- values.append(metrics.getOrElse(metricName, "N/A"))
+ inc.next().write(metrics.getOrElse(metricName, "N/A"))
}
- values.append(line.rowCount.getOrElse("N/A"))
- values.append(line.planningTimeMillis.getOrElse("N/A"))
- values.append(line.executionTimeMillis.getOrElse("N/A"))
- values
+ inc.next().write(line.rowCount.getOrElse("N/A"))
+ inc.next().write(line.planningTimeMillis.getOrElse("N/A"))
+ inc.next().write(line.executionTimeMillis.getOrElse("N/A"))
}
}
}
@@ -231,14 +233,14 @@ case class TestResultLines(
fields.append("Row Count")
fields.append("Planning Time (Millis)")
fields.append("Query Time (Millis)")
- val formatter = TableFormatter.create[TestResultLine](fields: _*)(
+ val render = TableRender.plain[TestResultLine](fields: _*)(
new TestResultLine.Parser(dimNames, metricNames))
lines.foreach { line =>
- formatter.appendRow(line)
+ render.appendRow(line)
}
- formatter.print(System.out)
+ render.print(System.out)
}
}
diff --git
a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Queries.scala
b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Queries.scala
index cf24b906b..540abbf45 100644
---
a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Queries.scala
+++
b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Queries.scala
@@ -18,6 +18,7 @@ package org.apache.gluten.integration.action
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.gluten.integration.action.Actions.QuerySelector
+import org.apache.gluten.integration.action.TableRender.RowParser.FieldAppender.RowAppender
import org.apache.gluten.integration.stat.RamStat
import org.apache.gluten.integration.{QueryRunner, Suite}
@@ -108,20 +109,20 @@ object Queries {
errorMessage: Option[String])
object TestResultLine {
- implicit object Parser extends TableFormatter.RowParser[TestResultLine] {
- override def parse(line: TestResultLine): Seq[Any] = {
- Seq(
- line.queryId,
- line.testPassed,
- line.rowCount.getOrElse("N/A"),
- line.planningTimeMillis.getOrElse("N/A"),
- line.executionTimeMillis.getOrElse("N/A"))
+ implicit object Parser extends TableRender.RowParser[TestResultLine] {
+ override def parse(rowAppender: RowAppender, line: TestResultLine): Unit
= {
+ val inc = rowAppender.incremental()
+ inc.next().write(line.queryId)
+ inc.next().write(line.testPassed)
+ inc.next().write(line.rowCount.getOrElse("N/A"))
+ inc.next().write(line.planningTimeMillis.getOrElse("N/A"))
+ inc.next().write(line.executionTimeMillis.getOrElse("N/A"))
}
}
}
private def printResults(results: List[TestResultLine]): Unit = {
- val formatter = TableFormatter.create[TestResultLine](
+ val render = TableRender.plain[TestResultLine](
"Query ID",
"Was Passed",
"Row Count",
@@ -129,10 +130,10 @@ object Queries {
"Query Time (Millis)")
results.foreach { line =>
- formatter.appendRow(line)
+ render.appendRow(line)
}
- formatter.print(System.out)
+ render.print(System.out)
}
private def aggregate(succeed: List[TestResultLine], name: String):
List[TestResultLine] = {
diff --git
a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/QueriesCompare.scala
b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/QueriesCompare.scala
index 320bd61b6..596c293e4 100644
---
a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/QueriesCompare.scala
+++
b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/QueriesCompare.scala
@@ -18,6 +18,7 @@ package org.apache.gluten.integration.action
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.gluten.integration.action.Actions.QuerySelector
+import org.apache.gluten.integration.action.TableRender.RowParser.FieldAppender.RowAppender
import org.apache.gluten.integration.stat.RamStat
import org.apache.gluten.integration.{QueryRunner, Suite, TableCreator}
import org.apache.spark.sql.{SparkSessionSwitcher, TestUtils}
@@ -111,45 +112,44 @@ object QueriesCompare {
errorMessage: Option[String])
object TestResultLine {
- implicit object Parser extends TableFormatter.RowParser[TestResultLine] {
- override def parse(line: TestResultLine): Seq[Any] = {
- val timeVariation =
+ implicit object Parser extends TableRender.RowParser[TestResultLine] {
+ override def parse(rowAppender: RowAppender, line: TestResultLine): Unit
= {
+ val inc = rowAppender.incremental()
+ val speedUp =
if (line.expectedExecutionTimeMillis.nonEmpty &&
line.actualExecutionTimeMillis.nonEmpty) {
Some(
((line.expectedExecutionTimeMillis.get -
line.actualExecutionTimeMillis.get).toDouble
/ line.actualExecutionTimeMillis.get.toDouble) * 100)
} else None
- Seq(
- line.queryId,
- line.testPassed,
- line.expectedRowCount.getOrElse("N/A"),
- line.actualRowCount.getOrElse("N/A"),
- line.expectedPlanningTimeMillis.getOrElse("N/A"),
- line.actualPlanningTimeMillis.getOrElse("N/A"),
- line.expectedExecutionTimeMillis.getOrElse("N/A"),
- line.actualExecutionTimeMillis.getOrElse("N/A"),
- timeVariation.map("%15.2f%%".format(_)).getOrElse("N/A"))
+ inc.next().write(line.queryId)
+ inc.next().write(line.testPassed)
+ inc.next().write(line.expectedRowCount.getOrElse("N/A"))
+ inc.next().write(line.actualRowCount.getOrElse("N/A"))
+ inc.next().write(line.expectedPlanningTimeMillis.getOrElse("N/A"))
+ inc.next().write(line.actualPlanningTimeMillis.getOrElse("N/A"))
+ inc.next().write(line.expectedExecutionTimeMillis.getOrElse("N/A"))
+ inc.next().write(line.actualExecutionTimeMillis.getOrElse("N/A"))
+ inc.next().write(speedUp.map("%.2f%%".format(_)).getOrElse("N/A"))
}
}
}
private def printResults(results: List[TestResultLine]): Unit = {
- val formatter = TableFormatter.create[TestResultLine](
- "Query ID",
- "Was Passed",
- "Expected Row Count",
- "Actual Row Count",
- "Baseline Planning Time (Millis)",
- "Planning Time (Millis)",
- "Baseline Query Time (Millis)",
- "Query Time (Millis)",
- "Query Time Variation")
+ import org.apache.gluten.integration.action.TableRender.Field._
+
+ val render = TableRender.create[TestResultLine](
+ Leaf("Query ID"),
+ Leaf("Passed"),
+ Branch("Row Count", List(Leaf("Vanilla"), Leaf("Gluten"))),
+ Branch("Planning Time (Millis)", List(Leaf("Vanilla"), Leaf("Gluten"))),
+ Branch("Query Time (Millis)", List(Leaf("Vanilla"), Leaf("Gluten"))),
+ Leaf("Speedup"))
results.foreach { line =>
- formatter.appendRow(line)
+ render.appendRow(line)
}
- formatter.print(System.out)
+ render.print(System.out)
}
private def aggregate(succeed: List[TestResultLine], name: String):
List[TestResultLine] = {
diff --git
a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/TableFormatter.scala
b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/TableFormatter.scala
deleted file mode 100644
index 07e253d5e..000000000
---
a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/TableFormatter.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.gluten.integration.action
-
-import java.io.{OutputStream, PrintStream}
-import scala.collection.mutable
-
-trait TableFormatter[ROW <: Any] {
- import TableFormatter._
- def appendRow(row: ROW): Unit
- def print(s: OutputStream): Unit
-}
-
-object TableFormatter {
- def create[ROW <: Any](fields: String*)(
- implicit parser: RowParser[ROW]): TableFormatter[ROW] = {
- assert(fields.nonEmpty)
- new Impl[ROW](Schema(fields), parser)
- }
-
- private case class Schema(fields: Seq[String])
-
- private class Impl[ROW <: Any](schema: Schema, parser: RowParser[ROW])
- extends TableFormatter[ROW] {
- private val rows = mutable.ListBuffer[Seq[String]]()
-
- override def appendRow(row: ROW): Unit = {
- val parsed = parser.parse(row)
- assert(parsed.size == schema.fields.size)
- rows += parsed.map(_.toString)
- }
-
- override def print(s: OutputStream): Unit = {
- val printer = new PrintStream(s)
- if (rows.isEmpty) {
- printer.println("(N/A)")
- printer.flush()
- return
- }
- val numFields = schema.fields.size
- val widths = (0 until numFields)
- .map { i =>
- rows.map(_(i).length).max max schema.fields(i).length
- }
- .map(_ + 1)
- val pBuilder = StringBuilder.newBuilder
- pBuilder ++= "|"
- widths.foreach { w =>
- pBuilder ++= s"%${w}s|"
- }
- val pattern = pBuilder.toString()
- printer.println(String.format(pattern, schema.fields: _*))
- rows.foreach { r =>
- printer.println(String.format(pattern, r: _*))
- }
- printer.flush()
- }
- }
-
- trait RowParser[ROW <: Any] {
- def parse(row: ROW): Seq[Any]
- }
-}
diff --git
a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/TableRender.scala
b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/TableRender.scala
new file mode 100644
index 000000000..b25a5db93
--- /dev/null
+++
b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/TableRender.scala
@@ -0,0 +1,308 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.gluten.integration.action
+
+import org.apache.commons.lang3.StringUtils
+import org.apache.gluten.integration.action.TableRender.RowParser.FieldAppender.RowAppender
+
+import java.io.{OutputStream, PrintStream}
+import scala.collection.mutable
+
+trait TableRender[ROW <: Any] {
+ def appendRow(row: ROW): Unit
+ def print(s: OutputStream): Unit
+}
+
+object TableRender {
+ def create[ROW <: Any](fields: Field*)(implicit parser: RowParser[ROW]):
TableRender[ROW] = {
+ assert(fields.nonEmpty)
+ new Impl[ROW](Schema(fields), parser)
+ }
+
+ def plain[ROW <: Any](fields: String*)(implicit parser: RowParser[ROW]):
TableRender[ROW] = {
+ assert(fields.nonEmpty)
+ new Impl[ROW](Schema(fields.map(Field.Leaf)), parser)
+ }
+
+ trait Field {
+ def name: String
+ def leafs: Seq[Field.Leaf]
+ }
+
+ object Field {
+ case class Branch(override val name: String, children: Seq[Field]) extends
Field {
+ override val leafs: Seq[Leaf] = {
+ children.map(leafsOf).reduce(_ ++ _)
+ }
+
+ private def leafsOf(field: Field): Seq[Field.Leaf] = {
+ field match {
+ case l @ Field.Leaf(_) => List(l)
+ case b @ Field.Branch(_, children) =>
+ children.map(child => leafsOf(child)).reduce(_ ++ _)
+ }
+ }
+ }
+ case class Leaf(override val name: String) extends Field {
+ override val leafs: Seq[Leaf] = List(this)
+ }
+ }
+
+ private case class Schema(fields: Seq[Field]) {
+ val leafs: Seq[Field.Leaf] = {
+ fields.map(_.leafs).reduce(_ ++ _)
+ }
+
+ val maxNestingLevel: Int = {
+ fields.map(maxNestingLevelOf).max
+ }
+
+ private def maxNestingLevelOf(field: Field): Int = {
+ field match {
+ case _: Field.Leaf => 1
+ case Field.Branch(_, children) => children.map(maxNestingLevelOf).max
+ 1
+ }
+ }
+ }
+
+ private class Impl[ROW <: Any](schema: Schema, parser: RowParser[ROW])
+ extends TableRender[ROW] {
+ private val appenderFactory =
RowParser.FieldAppender.TableAppender.create(schema)
+
+ override def appendRow(row: ROW): Unit = {
+ parser.parse(appenderFactory.newRow(), row)
+ }
+
+ override def print(s: OutputStream): Unit = {
+ val data = appenderFactory.data()
+ val printer = new PrintStream(s)
+ if (data.isEmpty) {
+ printer.println("(N/A)")
+ printer.flush()
+ return
+ }
+
+ // The map is incrementally updated while walking the schema tree from
top down.
+ val widthMap: mutable.Map[Int, Int] = mutable.Map()
+
+ val dataWidths = schema.leafs.indices
+ .map { i =>
+ data.map(_(i).length).max
+ }
+ .map(_ + 2)
+
+ schema.leafs.zipWithIndex.foreach {
+ case (leaf, i) =>
+ val dataWidth = dataWidths(i)
+ widthMap += (System.identityHashCode(leaf) -> (dataWidth max
(leaf.name.length + 2)))
+ }
+
+ schema.fields.foreach { root =>
+ def updateWidth(field: Field, lowerBound: Int): Unit = {
+ field match {
+ case branch @ Field.Branch(name, children) =>
+ val childLowerBound =
+ Math.ceil((lowerBound max name.length + 2).toDouble /
children.size.toDouble).toInt
+ children.foreach(child => updateWidth(child, childLowerBound))
+ val childrenWidth =
+ children.map(child =>
widthMap(System.identityHashCode(child))).sum
+ val width = childLowerBound * children.size max childrenWidth +
children.size - 1
+ val hash = System.identityHashCode(branch)
+ widthMap += hash -> width
+ case leaf @ Field.Leaf(name) =>
+ val hash = System.identityHashCode(leaf)
+ val newWidth = widthMap(hash) max lowerBound
+ widthMap.put(hash, newWidth)
+ case _ => new IllegalStateException()
+ }
+ }
+
+ updateWidth(root, 0)
+ }
+
+ trait SchemaCell
+ case class Given(field: Field) extends SchemaCell
+ case class PlaceHolder(leaf: Field.Leaf) extends SchemaCell
+
+ (0 until
schema.maxNestingLevel).foldRight[Seq[SchemaCell]](schema.fields.map(Given)) {
+ case (_, cells) =>
+ val schemaLine = cells
+ .map {
+ case Given(field) =>
+ (field.name, widthMap(System.identityHashCode(field)))
+ case PlaceHolder(leaf) =>
+ ("", widthMap(System.identityHashCode(leaf)))
+ }
+ .map {
+ case (name, width) =>
+ StringUtils.center(name, width)
+ }
+ .mkString("|", "|", "|")
+ printer.println(schemaLine)
+ cells.flatMap { f =>
+ f match {
+ case Given(Field.Branch(name, children)) => children.map(Given)
+ case Given(l @ Field.Leaf(name)) => List(PlaceHolder(l))
+ case p: PlaceHolder => List(p)
+ case _ => throw new IllegalStateException()
+ }
+ }
+ }
+
+ val separationLine = schema.leafs
+ .map { leaf =>
+ widthMap(System.identityHashCode(leaf))
+ }
+ .map { width =>
+ new String(Array.tabulate(width)(_ => '-'))
+ }
+ .mkString("|", "|", "|")
+
+ printer.println(separationLine)
+
+ data.foreach { row =>
+ val dataLine = row
+ .zip(schema.leafs)
+ .map {
+ case (value, leaf) =>
+ (value, widthMap(System.identityHashCode(leaf)))
+ }
+ .map {
+ case (value, width) =>
+ StringUtils.leftPad(value, width)
+ }
+ .mkString("|", "|", "|")
+ printer.println(dataLine)
+ }
+
+ printer.flush()
+ }
+ }
+
+ trait RowParser[ROW <: Any] {
+ def parse(rowFactory: RowAppender, row: ROW): Unit
+ }
+
+ object RowParser {
+ trait FieldAppender {
+ def child(name: String): FieldAppender
+ def write(value: Any): Unit
+ }
+
+ object FieldAppender {
+ trait RowAppender {
+ def field(name: String): FieldAppender
+ def field(offset: Int): FieldAppender
+ def incremental(): RowAppender.Incremental
+ }
+
+ object RowAppender {
+ def create(
+ schema: Schema,
+ mutableRows: mutable.ListBuffer[Array[String]]): RowAppender = {
+ new RowAppenderImpl(schema, mutableRows)
+ }
+
+ trait Incremental {
+ def next(): FieldAppender
+ }
+
+ private class RowAppenderImpl(
+ schema: Schema,
+ mutableRows: mutable.ListBuffer[Array[String]])
+ extends RowAppender {
+ private val mutableRow = Array.tabulate(schema.leafs.size) { _ =>
+ "UNFILLED"
+ }
+ mutableRows += mutableRow
+
+ override def field(name: String): FieldAppender = {
+ val fields = schema.fields
+ assert(fields.count(_.name == name) == 1)
+ val field = fields.zipWithIndex.find(_._1.name == name).getOrElse {
+ throw new IllegalArgumentException(s"Field $name not found in
$schema")
+ }
+ val column = field._2
+ new FieldAppenderImpl(field._1, mutableRow, column)
+ }
+
+ override def field(offset: Int): FieldAppender = {
+ new FieldAppenderImpl(schema.fields(offset), mutableRow, offset)
+ }
+
+ override def incremental(): Incremental = {
+ new Incremental {
+ private var offset = 0
+ override def next(): FieldAppender = {
+ val out = new FieldAppenderImpl(schema.leafs(offset),
mutableRow, offset)
+ offset += 1
+ out
+ }
+ }
+ }
+ }
+ }
+
+ trait TableAppender {
+ def newRow(): RowAppender
+ def data(): Seq[Seq[String]]
+ }
+
+ object TableAppender {
+ def create(schema: Schema): TableAppender = {
+ new TableAppenderImpl(schema)
+ }
+
+ private class TableAppenderImpl(schema: Schema) extends TableAppender {
+ private val mutableRows: mutable.ListBuffer[Array[String]] =
mutable.ListBuffer()
+
+ override def newRow(): RowAppender = {
+ RowAppender.create(schema, mutableRows)
+ }
+
+ override def data(): Seq[Seq[String]] = {
+ mutableRows.map(_.toSeq)
+ }
+ }
+ }
+
+ private class FieldAppenderImpl(field: Field, mutableRow: Array[String],
column: Int)
+ extends FieldAppender {
+ override def child(name: String): FieldAppender = {
+ field match {
+ case Field.Branch(_, children) =>
+ assert(children.count(_.name == name) == 1)
+ val child = children.zipWithIndex.find(_._1.name ==
name).getOrElse {
+ throw new IllegalArgumentException(s"Field $name not found in
$field")
+ }
+ val childField = child._1
+ val childOffset = child._2
+ new FieldAppenderImpl(childField, mutableRow, column +
childOffset)
+ case _ =>
+ throw new IllegalArgumentException(s"Field $field is not a
branch")
+ }
+ }
+
+ override def write(value: Any): Unit = {
+ assert(field.isInstanceOf[Field.Leaf])
+ mutableRow(column) = value.toString
+ }
+ }
+ }
+ }
+}
diff --git
a/tools/gluten-it/common/src/test/java/org/apache/gluten/integration/action/TableRenderTest.scala
b/tools/gluten-it/common/src/test/java/org/apache/gluten/integration/action/TableRenderTest.scala
new file mode 100644
index 000000000..87ad23f36
--- /dev/null
+++
b/tools/gluten-it/common/src/test/java/org/apache/gluten/integration/action/TableRenderTest.scala
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.gluten.integration.action
+
+import org.apache.gluten.integration.action.TableRender.Field._
+import org.apache.gluten.integration.action.TableRender.RowParser
+import org.apache.gluten.integration.action.TableRender.RowParser.FieldAppender
+
+// The tests are manually run.
+object TableRenderTest {
+ def case0(): Unit = {
+ val render: TableRender[Seq[String]] = TableRender.create(
+ Branch("ABC", List(Branch("AB", List(Leaf("A"), Leaf("B"))), Leaf("C"))),
+ Branch("DE", List(Leaf("D"), Leaf("E"))))(new RowParser[Seq[String]] {
+ override def parse(rowFactory: FieldAppender.RowAppender, row:
Seq[String]): Unit = {
+ val inc = rowFactory.incremental()
+ row.foreach(ceil => inc.next().write(ceil))
+ }
+ })
+ render.print(Console.out)
+ Console.out.println()
+ }
+
+ def case1(): Unit = {
+ val render: TableRender[Seq[String]] = TableRender.create(
+ Branch("ABC", List(Branch("AB", List(Leaf("A"), Leaf("B"))), Leaf("C"))),
+ Branch("DE", List(Leaf("D"), Leaf("E"))))(new RowParser[Seq[String]] {
+ override def parse(rowFactory: FieldAppender.RowAppender, row:
Seq[String]): Unit = {
+ val inc = rowFactory.incremental()
+ row.foreach(ceil => inc.next().write(ceil))
+ }
+ })
+
+ render.appendRow(List("aaaa", "b", "cccccc", "d", "eeeee"))
+ render.print(Console.out)
+ Console.out.println()
+ }
+
+ def case2(): Unit = {
+ val render: TableRender[Seq[String]] = TableRender.create(
+ Branch("ABC", List(Branch("AAAAAAAAABBBBBB", List(Leaf("A"),
Leaf("B"))), Leaf("C"))),
+ Branch("DE", List(Leaf("D"), Leaf("E"))))(new RowParser[Seq[String]] {
+ override def parse(rowFactory: FieldAppender.RowAppender, row:
Seq[String]): Unit = {
+ val inc = rowFactory.incremental()
+ row.foreach(ceil => inc.next().write(ceil))
+ }
+ })
+
+ render.appendRow(List("aaaa", "b", "cccccc", "d", "eeeee"))
+ render.print(Console.out)
+ Console.out.println()
+ }
+
+ def case3(): Unit = {
+ val render: TableRender[Seq[String]] = TableRender.create(
+ Branch("ABC", List(Branch("AB", List(Leaf("A"), Leaf("B"))),
Leaf("CCCCCCCCCCCCC"))),
+ Branch("DE", List(Leaf("D"), Leaf("E"))))(new RowParser[Seq[String]] {
+ override def parse(rowFactory: FieldAppender.RowAppender, row:
Seq[String]): Unit = {
+ val inc = rowFactory.incremental()
+ row.foreach(ceil => inc.next().write(ceil))
+ }
+ })
+
+ render.appendRow(List("aaaa", "b", "cccccc", "d", "eeeee"))
+ render.appendRow(List("aaaaaaaaaaaaa", "b", "cccccc", "ddddddddddd",
"eeeee"))
+ render.print(Console.out)
+ Console.out.println()
+ }
+
+ def main(args: Array[String]): Unit = {
+ case0()
+ case1()
+ case2()
+ case3()
+ }
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]