Repository: spark
Updated Branches:
  refs/heads/master 5ad68ba5c -> c377e49e3


[SPARK-16489][SQL] Guard against variable reuse mistakes in expression code generation

## What changes were proposed in this pull request?
In code generation, it is incorrect for an expression to reuse variable names across different instances of itself. As an example, SPARK-16488 reports a bug in which the pmod expression reuses the variable name "r".
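
For illustration, a minimal sketch of the pitfall and the usual fix (ReusedNameExample and FreshNameExample are hypothetical classes, not part of this patch; the fix mirrors the ctx.freshName pattern used in the crc32 change below):

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.LeafExpression
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
import org.apache.spark.sql.types.{DataType, IntegerType}

// Hypothetical expression with the bug: the generated code hard-codes the
// variable name "tmp", so projecting two instances of this expression
// declares "tmp" twice in the generated code and compilation fails.
case class ReusedNameExample() extends LeafExpression {
  override def nullable: Boolean = false
  override def dataType: DataType = IntegerType
  override def eval(input: InternalRow): Any = 10
  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
    ev.copy(code =
      s"""
         |int tmp = 10;
         |int ${ev.value} = tmp;
       """.stripMargin)
}

// The usual fix: ask the CodegenContext for a name that is unique per instance.
case class FreshNameExample() extends LeafExpression {
  override def nullable: Boolean = false
  override def dataType: DataType = IntegerType
  override def eval(input: InternalRow): Any = 10
  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val tmp = ctx.freshName("tmp")
    ev.copy(code =
      s"""
         |int $tmp = 10;
         |int ${ev.value} = $tmp;
       """.stripMargin)
  }
}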

This patch updates the ExpressionEvalHelper test harness to always project two instances of the same expression, which will help catch variable reuse problems in expression unit tests. This patch also fixes the bug in the crc32 expression.
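
With this change, an ordinary expression unit test is enough to surface such a bug, because checkEvaluation now compiles a projection that contains the expression twice. A minimal usage sketch (the suite and test names are hypothetical; a null input is used only to keep the expected value trivial):

package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types.BinaryType

// Hypothetical suite: if Crc32's generated code reused a fixed variable name,
// this call would now fail at codegen compile time rather than only at query time.
class Crc32CodegenExampleSuite extends SparkFunSuite with ExpressionEvalHelper {
  test("crc32 codegen uses per-instance variable names") {
    checkEvaluation(Crc32(Literal.create(null, BinaryType)), null)
  }
}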

## How was this patch tested?
This is a test harness change, but I also created a new test suite for testing the test harness.

Author: Reynold Xin <r...@databricks.com>

Closes #14146 from rxin/SPARK-16489.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c377e49e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c377e49e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c377e49e

Branch: refs/heads/master
Commit: c377e49e38a290e5c4fbc178278069788674dfb7
Parents: 5ad68ba
Author: Reynold Xin <r...@databricks.com>
Authored: Tue Jul 12 10:07:23 2016 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Tue Jul 12 10:07:23 2016 -0700

----------------------------------------------------------------------
 .../spark/sql/catalyst/expressions/misc.scala   |  7 +--
 .../expressions/ExpressionEvalHelper.scala      | 15 ++++--
 .../expressions/ExpressionEvalHelperSuite.scala | 54 ++++++++++++++++++++
 .../sql/test/DataFrameReaderWriterSuite.scala   | 14 -----
 4 files changed, 68 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c377e49e/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
index 1c0787b..d2c94ec 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
@@ -175,11 +175,12 @@ case class Crc32(child: Expression) extends UnaryExpression with ImplicitCastInp
 
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     val CRC32 = "java.util.zip.CRC32"
+    val checksum = ctx.freshName("checksum")
     nullSafeCodeGen(ctx, ev, value => {
       s"""
-        $CRC32 checksum = new $CRC32();
-        checksum.update($value, 0, $value.length);
-        ${ev.value} = checksum.getValue();
+        $CRC32 $checksum = new $CRC32();
+        $checksum.update($value, 0, $value.length);
+        ${ev.value} = $checksum.getValue();
       """
     })
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/c377e49e/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
index 58e9d6f..d6a9672 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
@@ -132,9 +132,13 @@ trait ExpressionEvalHelper extends GeneratorDrivenPropertyChecks {
       expression: Expression,
       expected: Any,
       inputRow: InternalRow = EmptyRow): Unit = {
-
+    // SPARK-16489 Explicitly doing code generation twice so code gen will fail if
+    // some expression is reusing variable names across different instances.
+    // This behavior is tested in ExpressionEvalHelperSuite.
     val plan = generateProject(
-      GenerateUnsafeProjection.generate(Alias(expression, s"Optimized($expression)")() :: Nil),
+      GenerateUnsafeProjection.generate(
+        Alias(expression, s"Optimized($expression)1")() ::
+          Alias(expression, s"Optimized($expression)2")() :: Nil),
       expression)
 
     val unsafeRow = plan(inputRow)
@@ -142,13 +146,14 @@ trait ExpressionEvalHelper extends GeneratorDrivenPropertyChecks {
 
     if (expected == null) {
       if (!unsafeRow.isNullAt(0)) {
-        val expectedRow = InternalRow(expected)
+        val expectedRow = InternalRow(expected, expected)
         fail("Incorrect evaluation in unsafe mode: " +
           s"$expression, actual: $unsafeRow, expected: $expectedRow$input")
       }
     } else {
-      val lit = InternalRow(expected)
-      val expectedRow = UnsafeProjection.create(Array(expression.dataType)).apply(lit)
+      val lit = InternalRow(expected, expected)
+      val expectedRow =
+        UnsafeProjection.create(Array(expression.dataType, expression.dataType)).apply(lit)
       if (unsafeRow != expectedRow) {
         fail("Incorrect evaluation in unsafe mode: " +
           s"$expression, actual: $unsafeRow, expected: $expectedRow$input")

http://git-wip-us.apache.org/repos/asf/spark/blob/c377e49e/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelperSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelperSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelperSuite.scala
new file mode 100644
index 0000000..64b65e2
--- /dev/null
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelperSuite.scala
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions
+
+import org.apache.spark.SparkFunSuite
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
+import org.apache.spark.sql.types.{DataType, IntegerType}
+
+/**
+ * A test suite for testing [[ExpressionEvalHelper]].
+ *
+ * Yes, we should write test cases for test harnesses, in case
+ * they have behaviors that are easy to break.
+ */
+class ExpressionEvalHelperSuite extends SparkFunSuite with ExpressionEvalHelper {
+
+  test("SPARK-16489 checkEvaluation should fail if expression reuses variable 
names") {
+    val e = intercept[RuntimeException] { 
checkEvaluation(BadCodegenExpression(), 10) }
+    assert(e.getMessage.contains("some_variable"))
+  }
+}
+
+/**
+ * An expression that generates bad code (variable name "some_variable" is not unique across
+ * instances of the expression.
+ */
+case class BadCodegenExpression() extends LeafExpression {
+  override def nullable: Boolean = false
+  override def eval(input: InternalRow): Any = 10
+  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
+    ev.copy(code =
+      s"""
+        |int some_variable = 11;
+        |int ${ev.value} = 10;
+      """.stripMargin)
+  }
+  override def dataType: DataType = IntegerType
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/c377e49e/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala
index f706b20..05935ce 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala
@@ -449,20 +449,6 @@ class DataFrameReaderWriterSuite extends QueryTest with SharedSQLContext with Be
     }
   }
 
-  test("pmod with partitionBy") {
-    val spark = this.spark
-    import spark.implicits._
-
-    case class Test(a: Int, b: String)
-    val data = Seq((0, "a"), (1, "b"), (1, "a"))
-    spark.createDataset(data).createOrReplaceTempView("test")
-    sql("select * from test distribute by pmod(_1, 2)")
-      .write
-      .partitionBy("_2")
-      .mode("overwrite")
-      .parquet(dir)
-  }
-
   private def testRead(
       df: => DataFrame,
       expectedResult: Seq[String],

