git commit: [SQL] Convert arguments to Scala UDFs

2014-11-03 Thread marmbrus
Repository: spark
Updated Branches:
  refs/heads/master 28128150e -> 15b58a223


[SQL] Convert arguments to Scala UDFs

Author: Michael Armbrust mich...@databricks.com

Closes #3077 from marmbrus/udfsWithUdts and squashes the following commits:

34b5f27 [Michael Armbrust] style
504adef [Michael Armbrust] Convert arguments to Scala UDFs


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/15b58a22
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/15b58a22
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/15b58a22

Branch: refs/heads/master
Commit: 15b58a2234ab7ba30c9c0cbb536177a3c725e350
Parents: 2812815
Author: Michael Armbrust mich...@databricks.com
Authored: Mon Nov 3 18:04:51 2014 -0800
Committer: Michael Armbrust mich...@databricks.com
Committed: Mon Nov 3 18:04:51 2014 -0800

--
 .../sql/catalyst/expressions/ScalaUdf.scala | 560 ++-
 .../apache/spark/sql/UserDefinedTypeSuite.scala |  18 +-
 2 files changed, 316 insertions(+), 262 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/15b58a22/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
--
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
index fa1786e..18c96da 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
@@ -34,320 +34,366 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, 
children: Seq[Expressi
 
   override def toString = s"scalaUDF(${children.mkString(",")})"
 
+  // scalastyle:off
+
   /** This method has been generated by this script
 
(1 to 22).map { x =>
  val anys = (1 to x).map(x => "Any").reduce(_ + ", " + _)
-  val evals = (0 to x - 1).map(x = schildren($x).eval(input)).reduce(_ 
+ ,\n + _)
+  val evals = (0 to x - 1).map(x = s
ScalaReflection.convertToScala(children($x).eval(input), 
children($x).dataType)).reduce(_ + ,\n + _)
 
 s
 case $x =
   function.asInstanceOf[($anys) = Any](
-  $evals)
+$evals)
 
-}
+}.foreach(println)
 
   */
 
-  // scalastyle:off
   override def eval(input: Row): Any = {
 val result = children.size match {
   case 0 = function.asInstanceOf[() = Any]()
-  case 1 = function.asInstanceOf[(Any) = Any](children(0).eval(input))
+  case 1 =
+function.asInstanceOf[(Any) = Any](
+  ScalaReflection.convertToScala(children(0).eval(input), 
children(0).dataType))
+
+
   case 2 =
 function.asInstanceOf[(Any, Any) = Any](
-  children(0).eval(input),
-  children(1).eval(input))
+  ScalaReflection.convertToScala(children(0).eval(input), 
children(0).dataType),
+  ScalaReflection.convertToScala(children(1).eval(input), 
children(1).dataType))
+
+
   case 3 =
 function.asInstanceOf[(Any, Any, Any) = Any](
-  children(0).eval(input),
-  children(1).eval(input),
-  children(2).eval(input))
+  ScalaReflection.convertToScala(children(0).eval(input), 
children(0).dataType),
+  ScalaReflection.convertToScala(children(1).eval(input), 
children(1).dataType),
+  ScalaReflection.convertToScala(children(2).eval(input), 
children(2).dataType))
+
+
   case 4 =
 function.asInstanceOf[(Any, Any, Any, Any) = Any](
-  children(0).eval(input),
-  children(1).eval(input),
-  children(2).eval(input),
-  children(3).eval(input))
+  ScalaReflection.convertToScala(children(0).eval(input), 
children(0).dataType),
+  ScalaReflection.convertToScala(children(1).eval(input), 
children(1).dataType),
+  ScalaReflection.convertToScala(children(2).eval(input), 
children(2).dataType),
+  ScalaReflection.convertToScala(children(3).eval(input), 
children(3).dataType))
+
+
   case 5 =
 function.asInstanceOf[(Any, Any, Any, Any, Any) = Any](
-  children(0).eval(input),
-  children(1).eval(input),
-  children(2).eval(input),
-  children(3).eval(input),
-  children(4).eval(input))
+  ScalaReflection.convertToScala(children(0).eval(input), 
children(0).dataType),
+  ScalaReflection.convertToScala(children(1).eval(input), 
children(1).dataType),
+  ScalaReflection.convertToScala(children(2).eval(input), 
children(2).dataType),
+  ScalaReflection.convertToScala(children(3).eval(input), 
children(3).dataType),
+  ScalaReflection.convertToScala(children(4).eval(input), 

git commit: [SQL] Convert arguments to Scala UDFs

2014-11-03 Thread marmbrus
Repository: spark
Updated Branches:
  refs/heads/branch-1.2 fa86d862f -> 52db2b942


[SQL] Convert arguments to Scala UDFs

Author: Michael Armbrust mich...@databricks.com

Closes #3077 from marmbrus/udfsWithUdts and squashes the following commits:

34b5f27 [Michael Armbrust] style
504adef [Michael Armbrust] Convert arguments to Scala UDFs

(cherry picked from commit 15b58a2234ab7ba30c9c0cbb536177a3c725e350)
Signed-off-by: Michael Armbrust mich...@databricks.com


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/52db2b94
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/52db2b94
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/52db2b94

Branch: refs/heads/branch-1.2
Commit: 52db2b9429e00d8ed398a2432ad6a26cd1e5920c
Parents: fa86d86
Author: Michael Armbrust mich...@databricks.com
Authored: Mon Nov 3 18:04:51 2014 -0800
Committer: Michael Armbrust mich...@databricks.com
Committed: Mon Nov 3 18:05:02 2014 -0800

--
 .../sql/catalyst/expressions/ScalaUdf.scala | 560 ++-
 .../apache/spark/sql/UserDefinedTypeSuite.scala |  18 +-
 2 files changed, 316 insertions(+), 262 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/52db2b94/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
--
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
index fa1786e..18c96da 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
@@ -34,320 +34,366 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, 
children: Seq[Expressi
 
   override def toString = s"scalaUDF(${children.mkString(",")})"
 
+  // scalastyle:off
+
   /** This method has been generated by this script
 
(1 to 22).map { x =>
  val anys = (1 to x).map(x => "Any").reduce(_ + ", " + _)
-  val evals = (0 to x - 1).map(x = schildren($x).eval(input)).reduce(_ 
+ ,\n + _)
+  val evals = (0 to x - 1).map(x = s
ScalaReflection.convertToScala(children($x).eval(input), 
children($x).dataType)).reduce(_ + ,\n + _)
 
 s
 case $x =
   function.asInstanceOf[($anys) = Any](
-  $evals)
+$evals)
 
-}
+}.foreach(println)
 
   */
 
-  // scalastyle:off
   override def eval(input: Row): Any = {
 val result = children.size match {
   case 0 = function.asInstanceOf[() = Any]()
-  case 1 = function.asInstanceOf[(Any) = Any](children(0).eval(input))
+  case 1 =
+function.asInstanceOf[(Any) = Any](
+  ScalaReflection.convertToScala(children(0).eval(input), 
children(0).dataType))
+
+
   case 2 =
 function.asInstanceOf[(Any, Any) = Any](
-  children(0).eval(input),
-  children(1).eval(input))
+  ScalaReflection.convertToScala(children(0).eval(input), 
children(0).dataType),
+  ScalaReflection.convertToScala(children(1).eval(input), 
children(1).dataType))
+
+
   case 3 =
 function.asInstanceOf[(Any, Any, Any) = Any](
-  children(0).eval(input),
-  children(1).eval(input),
-  children(2).eval(input))
+  ScalaReflection.convertToScala(children(0).eval(input), 
children(0).dataType),
+  ScalaReflection.convertToScala(children(1).eval(input), 
children(1).dataType),
+  ScalaReflection.convertToScala(children(2).eval(input), 
children(2).dataType))
+
+
   case 4 =
 function.asInstanceOf[(Any, Any, Any, Any) = Any](
-  children(0).eval(input),
-  children(1).eval(input),
-  children(2).eval(input),
-  children(3).eval(input))
+  ScalaReflection.convertToScala(children(0).eval(input), 
children(0).dataType),
+  ScalaReflection.convertToScala(children(1).eval(input), 
children(1).dataType),
+  ScalaReflection.convertToScala(children(2).eval(input), 
children(2).dataType),
+  ScalaReflection.convertToScala(children(3).eval(input), 
children(3).dataType))
+
+
   case 5 =
 function.asInstanceOf[(Any, Any, Any, Any, Any) = Any](
-  children(0).eval(input),
-  children(1).eval(input),
-  children(2).eval(input),
-  children(3).eval(input),
-  children(4).eval(input))
+  ScalaReflection.convertToScala(children(0).eval(input), 
children(0).dataType),
+  ScalaReflection.convertToScala(children(1).eval(input), 
children(1).dataType),
+  ScalaReflection.convertToScala(children(2).eval(input), 
children(2).dataType),
+