Repository: spark
Updated Branches:
  refs/heads/master 8bceb899d -> 4c587eb48


[SPARK-23590][SQL] Add interpreted execution to CreateExternalRow

## What changes were proposed in this pull request?

This PR adds interpreted execution to `CreateExternalRow`.
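
For context, a minimal sketch of how the interpreted path can now be exercised directly (the two-integer schema, variable names, and the final assert are illustrative, not part of the patch; string literals are avoided here because they evaluate to Catalyst's internal UTF8String):

```scala
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.expressions.objects.CreateExternalRow
import org.apache.spark.sql.types.{IntegerType, StructType}

// Build the expression with literal children; before this patch, eval threw
// UnsupportedOperationException and only the code-generated path worked.
val schema = new StructType().add("a", IntegerType).add("b", IntegerType)
val expr = CreateExternalRow(Seq(Literal(1), Literal(2)), schema)

// With interpreted execution, eval builds a GenericRowWithSchema directly.
val row = expr.eval(InternalRow.fromSeq(Seq.empty)).asInstanceOf[Row]
assert(row == Row(1, 2))
```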

## How was this patch tested?

Added a unit test.

Author: Marco Gaido <marcogaid...@gmail.com>

Closes #20749 from mgaido91/SPARK-23590.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/4c587eb4
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/4c587eb4
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/4c587eb4

Branch: refs/heads/master
Commit: 4c587eb4887623c839854c1505f495de42898229
Parents: 8bceb89
Author: Marco Gaido <marcogaid...@gmail.com>
Authored: Tue Mar 6 17:42:17 2018 +0100
Committer: Herman van Hovell <hvanhov...@databricks.com>
Committed: Tue Mar 6 17:42:17 2018 +0100

----------------------------------------------------------------------
 .../spark/sql/catalyst/expressions/objects/objects.scala     | 6 ++++--
 .../sql/catalyst/expressions/ExpressionEvalHelper.scala      | 4 +++-
 .../sql/catalyst/expressions/ObjectExpressionsSuite.scala    | 8 +++++++-
 3 files changed, 14 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/4c587eb4/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
index 97e3ff8..721d589 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
@@ -1111,8 +1111,10 @@ case class CreateExternalRow(children: Seq[Expression], schema: StructType)
 
   override def nullable: Boolean = false
 
-  override def eval(input: InternalRow): Any =
-    throw new UnsupportedOperationException("Only code-generated evaluation is supported")
+  override def eval(input: InternalRow): Any = {
+    val values = children.map(_.eval(input)).toArray
+    new GenericRowWithSchema(values, schema)
+  }
 
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     val rowClass = classOf[GenericRowWithSchema].getName

http://git-wip-us.apache.org/repos/asf/spark/blob/4c587eb4/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
index b4c8eab..29f0cc0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
@@ -24,6 +24,7 @@ import org.scalatest.prop.GeneratorDrivenPropertyChecks
 
 import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.serializer.JavaSerializer
+import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
 import org.apache.spark.sql.catalyst.analysis.{ResolveTimeZone, SimpleAnalyzer}
 import org.apache.spark.sql.catalyst.expressions.codegen._
@@ -60,7 +61,7 @@ trait ExpressionEvalHelper extends GeneratorDrivenPropertyChecks {
 
   /**
    * Check the equality between result of expression and expected value, it will handle
-   * Array[Byte], Spread[Double], and MapData.
+   * Array[Byte], Spread[Double], MapData and Row.
    */
   protected def checkResult(result: Any, expected: Any, dataType: DataType): Boolean = {
     (result, expected) match {
@@ -88,6 +89,7 @@ trait ExpressionEvalHelper extends GeneratorDrivenPropertyChecks {
         if (expected.isNaN) result.isNaN else expected == result
       case (result: Float, expected: Float) =>
         if (expected.isNaN) result.isNaN else expected == result
+      case (result: Row, expected: InternalRow) => result.toSeq == expected.toSeq(result.schema)
       case _ =>
         result == expected
     }

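A minimal sketch of the comparison semantics behind the new case (the schema and values are illustrative; only the final expression mirrors the added line): the expected value, already converted to an InternalRow, is materialized with the result Row's schema so both sides compare as plain Seqs.

```scala
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types.{IntegerType, StructType}

// Illustrative pair: the interpreted result is an external Row with a schema,
// while the expected value is an InternalRow.
val schema = new StructType().add("a", IntegerType).add("b", IntegerType)
val result = new GenericRowWithSchema(Array[Any](1, 2), schema)
val expected = InternalRow.fromSeq(Seq(1, 2))

// The new case in checkResult: read the InternalRow's fields using the
// result's schema and compare both sides as Seqs.
assert(result.toSeq == expected.toSeq(result.schema))
```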
http://git-wip-us.apache.org/repos/asf/spark/blob/4c587eb4/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala
index 0f376c4..50e5773 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala
@@ -23,7 +23,7 @@ import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
 import org.apache.spark.sql.catalyst.expressions.objects._
 import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, GenericArrayData}
-import org.apache.spark.sql.types.{IntegerType, ObjectType}
+import org.apache.spark.sql.types._
 
 
 class ObjectExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
@@ -86,6 +86,12 @@ class ObjectExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     }
   }
 
+  test("SPARK-23590: CreateExternalRow should support interpreted execution") {
+    val schema = new StructType().add("a", IntegerType).add("b", StringType)
+    val createExternalRow = CreateExternalRow(Seq(Literal(1), Literal("x")), schema)
+    checkEvaluation(createExternalRow, Row.fromSeq(Seq(1, "x")), InternalRow.fromSeq(Seq()))
+  }
+
   test("SPARK-23594 GetExternalRowField should support interpreted execution") 
{
     val inputObject = BoundReference(0, ObjectType(classOf[Row]), nullable = 
true)
     val getRowField = GetExternalRowField(inputObject, index = 0, fieldName = 
"c0")

