Repository: spark
Updated Branches:
  refs/heads/master fa244e5a9 -> de726b0d5


Revert "[SPARK-15285][SQL] Generated SpecificSafeProjection.apply method grows 
beyond 64 KB"

This reverts commit fa244e5a90690d6a31be50f2aa203ae1a2e9a1cf.
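
For context: the reverted change worked around the JVM's 64 KB bytecode limit
per method. When a Dataset row is deserialized into a very wide, nested case
class, the generated SpecificSafeProjection.apply emits one assignment per
field and can exceed that limit. A minimal Scala sketch of the kind of schema
that can hit the limit (class and field names below are illustrative, not from
this commit):

    // Hypothetical wide, nested case classes, smaller but similar in spirit
    // to the S100/S100_5 classes removed by this revert.
    case class Wide(
      f1: String = "1", f2: String = "2", f3: String = "3", f4: String = "4",
      f5: String = "5", f6: String = "6", f7: String = "7", f8: String = "8")

    case class Nested(
      a: Wide = Wide(), b: Wide = Wide(), c: Wide = Wide(),
      d: Wide = Wide(), e: Wide = Wide())

    // With enough fields, materializing such a Dataset can fail with
    // "... grows beyond 64 KB" from the Janino compiler, e.g.:
    //   import spark.implicits._
    //   Seq(Nested()).toDS().rdd.count()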


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/de726b0d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/de726b0d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/de726b0d

Branch: refs/heads/master
Commit: de726b0d533158d3ca08841bd6976bcfa26ca79d
Parents: fa244e5
Author: Andrew Or <and...@databricks.com>
Authored: Mon May 23 21:43:11 2016 -0700
Committer: Andrew Or <and...@databricks.com>
Committed: Mon May 23 21:43:11 2016 -0700

----------------------------------------------------------------------
 .../catalyst/expressions/objects/objects.scala  | 32 ++++--------------
 .../spark/sql/DataFrameComplexTypeSuite.scala   | 35 --------------------
 2 files changed, 6 insertions(+), 61 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/de726b0d/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
index cc094f2..2f2323f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
@@ -232,47 +232,27 @@ case class NewInstance(
 
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     val javaType = ctx.javaType(dataType)
-    val argIsNulls = ctx.freshName("argIsNulls")
-    ctx.addMutableState("boolean[]", argIsNulls,
-      s"$argIsNulls = new boolean[${arguments.size}];")
-    val argValues = arguments.zipWithIndex.map { case (e, i) =>
-      val argValue = ctx.freshName("argValue")
-      ctx.addMutableState(ctx.javaType(e.dataType), argValue, "")
-      argValue
-    }
-
-    val argCodes = arguments.zipWithIndex.map { case (e, i) =>
-      val expr = e.genCode(ctx)
-      expr.code + s"""
-       $argIsNulls[$i] = ${expr.isNull};
-       ${argValues(i)} = ${expr.value};
-     """
-    }
-    val argCode = ctx.splitExpressions(ctx.INPUT_ROW, argCodes)
+    val argGen = arguments.map(_.genCode(ctx))
+    val argString = argGen.map(_.value).mkString(", ")
 
     val outer = outerPointer.map(func => Literal.fromObject(func()).genCode(ctx))
 
     var isNull = ev.isNull
     val setIsNull = if (propagateNull && arguments.nonEmpty) {
-      s"""
-       boolean $isNull = false;
-       for (int idx = 0; idx < ${arguments.length}; idx++) {
-         if ($argIsNulls[idx]) { $isNull = true; break; }
-       }
-      """
+      s"final boolean $isNull = ${argGen.map(_.isNull).mkString(" || ")};"
     } else {
       isNull = "false"
       ""
     }
 
     val constructorCall = outer.map { gen =>
-      s"""${gen.value}.new ${cls.getSimpleName}(${argValues.mkString(", ")})"""
+      s"""${gen.value}.new ${cls.getSimpleName}($argString)"""
     }.getOrElse {
-      s"new $className(${argValues.mkString(", ")})"
+      s"new $className($argString)"
     }
 
     val code = s"""
-      $argCode
+      ${argGen.map(_.code).mkString("\n")}
       ${outer.map(_.code).getOrElse("")}
       $setIsNull
       final $javaType ${ev.value} = $isNull ? ${ctx.defaultValue(javaType)} : $constructorCall;
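
Note on the hunk above: it restores the simpler inline strategy, where every
constructor argument's generated code is concatenated into the single apply
method and the null checks collapse into one chained "||" expression. The
reverted change instead kept each argument's value and null flag in mutable
state and used ctx.splitExpressions so the assignments could be spread across
several smaller generated methods. A rough, standalone Scala sketch of that
splitting idea (not Spark's actual codegen; names are illustrative):

    // Illustrative only: many small code snippets, one per constructor argument.
    val snippets: Seq[String] = (0 until 100).map { i =>
      s"argIsNulls[$i] = isNull_$i; argValues[$i] = value_$i;"
    }

    // Inline strategy (what the restored code does): one large method body.
    val inlined: String = snippets.mkString("\n")

    // Split strategy (what the reverted change did via ctx.splitExpressions):
    // group the snippets so each chunk becomes its own generated method and no
    // single method body grows past the 64 KB limit.
    val split: String = snippets.grouped(25).zipWithIndex.map { case (chunk, idx) =>
      s"private void evalArgs_$idx(InternalRow i) { ${chunk.mkString(" ")} }"
    }.mkString("\n")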

http://git-wip-us.apache.org/repos/asf/spark/blob/de726b0d/sql/core/src/test/scala/org/apache/spark/sql/DataFrameComplexTypeSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameComplexTypeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameComplexTypeSuite.scala
index 07fbaba..72f676e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameComplexTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameComplexTypeSuite.scala
@@ -58,39 +58,4 @@ class DataFrameComplexTypeSuite extends QueryTest with SharedSQLContext {
     val nullIntRow = df.selectExpr("i[1]").collect()(0)
     assert(nullIntRow == org.apache.spark.sql.Row(null))
   }
-
-  test("SPARK-15285 Generated SpecificSafeProjection.apply method grows beyond 
64KB") {
-    val ds100_5 = Seq(S100_5()).toDS()
-    ds100_5.rdd.count
-  }
 }
-
-case class S100(
-  s1: String = "1", s2: String = "2", s3: String = "3", s4: String = "4",
-  s5: String = "5", s6: String = "6", s7: String = "7", s8: String = "8",
-  s9: String = "9", s10: String = "10", s11: String = "11", s12: String = "12",
-  s13: String = "13", s14: String = "14", s15: String = "15", s16: String = "16",
-  s17: String = "17", s18: String = "18", s19: String = "19", s20: String = "20",
-  s21: String = "21", s22: String = "22", s23: String = "23", s24: String = "24",
-  s25: String = "25", s26: String = "26", s27: String = "27", s28: String = "28",
-  s29: String = "29", s30: String = "30", s31: String = "31", s32: String = "32",
-  s33: String = "33", s34: String = "34", s35: String = "35", s36: String = "36",
-  s37: String = "37", s38: String = "38", s39: String = "39", s40: String = "40",
-  s41: String = "41", s42: String = "42", s43: String = "43", s44: String = "44",
-  s45: String = "45", s46: String = "46", s47: String = "47", s48: String = "48",
-  s49: String = "49", s50: String = "50", s51: String = "51", s52: String = "52",
-  s53: String = "53", s54: String = "54", s55: String = "55", s56: String = "56",
-  s57: String = "57", s58: String = "58", s59: String = "59", s60: String = "60",
-  s61: String = "61", s62: String = "62", s63: String = "63", s64: String = "64",
-  s65: String = "65", s66: String = "66", s67: String = "67", s68: String = "68",
-  s69: String = "69", s70: String = "70", s71: String = "71", s72: String = "72",
-  s73: String = "73", s74: String = "74", s75: String = "75", s76: String = "76",
-  s77: String = "77", s78: String = "78", s79: String = "79", s80: String = "80",
-  s81: String = "81", s82: String = "82", s83: String = "83", s84: String = "84",
-  s85: String = "85", s86: String = "86", s87: String = "87", s88: String = "88",
-  s89: String = "89", s90: String = "90", s91: String = "91", s92: String = "92",
-  s93: String = "93", s94: String = "94", s95: String = "95", s96: String = "96",
-  s97: String = "97", s98: String = "98", s99: String = "99", s100: String = "100")
-
-case class S100_5(
-  s1: S100 = S100(), s2: S100 = S100(), s3: S100 = S100(), s4: S100 = S100(), s5: S100 = S100())

