Repository: spark
Updated Branches:
  refs/heads/master 4ee79c71a -> 5b9760de8


http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/main/scala/org/apache/spark/sql/dsl/package.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/dsl/package.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/dsl/package.scala
deleted file mode 100644
index 4c44e17..0000000
--- a/sql/core/src/main/scala/org/apache/spark/sql/dsl/package.scala
+++ /dev/null
@@ -1,496 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql
-
-import java.sql.{Timestamp, Date}
-
-import scala.language.implicitConversions
-import scala.reflect.runtime.universe.{TypeTag, typeTag}
-
-import org.apache.spark.sql.catalyst.ScalaReflection
-import org.apache.spark.sql.catalyst.expressions._
-import org.apache.spark.sql.types.DataType
-
-
-package object dsl {
-
-  implicit def symbolToColumn(s: Symbol): ColumnName = new ColumnName(s.name)
-
-  /** Converts $"col name" into an [[Column]]. */
-  implicit class StringToColumn(val sc: StringContext) extends AnyVal {
-    def $(args: Any*): ColumnName = {
-      new ColumnName(sc.s(args :_*))
-    }
-  }
-
-  private[this] implicit def toColumn(expr: Expression): Column = new Column(expr)
-
-  def sum(e: Column): Column = Sum(e.expr)
-  def sumDistinct(e: Column): Column = SumDistinct(e.expr)
-  def count(e: Column): Column = Count(e.expr)
-
-  @scala.annotation.varargs
-  def countDistinct(expr: Column, exprs: Column*): Column =
-    CountDistinct((expr +: exprs).map(_.expr))
-
-  def avg(e: Column): Column = Average(e.expr)
-  def first(e: Column): Column = First(e.expr)
-  def last(e: Column): Column = Last(e.expr)
-  def min(e: Column): Column = Min(e.expr)
-  def max(e: Column): Column = Max(e.expr)
-
-  def upper(e: Column): Column = Upper(e.expr)
-  def lower(e: Column): Column = Lower(e.expr)
-  def sqrt(e: Column): Column = Sqrt(e.expr)
-  def abs(e: Column): Column = Abs(e.expr)
-
-  // scalastyle:off
-
-  object literals {
-
-    implicit def booleanToLiteral(b: Boolean): Column = Literal(b)
-
-    implicit def byteToLiteral(b: Byte): Column = Literal(b)
-
-    implicit def shortToLiteral(s: Short): Column = Literal(s)
-
-    implicit def intToLiteral(i: Int): Column = Literal(i)
-
-    implicit def longToLiteral(l: Long): Column = Literal(l)
-
-    implicit def floatToLiteral(f: Float): Column = Literal(f)
-
-    implicit def doubleToLiteral(d: Double): Column = Literal(d)
-
-    implicit def stringToLiteral(s: String): Column = Literal(s)
-
-    implicit def dateToLiteral(d: Date): Column = Literal(d)
-
-    implicit def bigDecimalToLiteral(d: BigDecimal): Column = Literal(d.underlying())
-
-    implicit def bigDecimalToLiteral(d: java.math.BigDecimal): Column = Literal(d)
-
-    implicit def timestampToLiteral(t: Timestamp): Column = Literal(t)
-
-    implicit def binaryToLiteral(a: Array[Byte]): Column = Literal(a)
-  }
-
-
-  /* Use the following code to generate:
-  (0 to 22).map { x =>
-    val types = (1 to x).foldRight("RT")((i, s) => {s"A$i, $s"})
-    val typeTags = (1 to x).map(i => s"A$i: TypeTag").foldLeft("RT: TypeTag")(_ + ", " + _)
-    val args = (1 to x).map(i => s"arg$i: Column").mkString(", ")
-    val argsInUdf = (1 to x).map(i => s"arg$i.expr").mkString(", ")
-    println(s"""
-    /**
-     * Call a Scala function of ${x} arguments as user-defined function (UDF), and automatically
-     * infer the data types based on the function's signature.
-     */
-    def callUDF[$typeTags](f: Function$x[$types]${if (args.length > 0) ", " + args else ""}): Column = {
-      ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, Seq($argsInUdf))
-    }""")
-  }
-
-  (0 to 22).map { x =>
-    val args = (1 to x).map(i => s"arg$i: Column").mkString(", ")
-    val fTypes = Seq.fill(x + 1)("_").mkString(", ")
-    val argsInUdf = (1 to x).map(i => s"arg$i.expr").mkString(", ")
-    println(s"""
-    /**
-     * Call a Scala function of ${x} arguments as user-defined function (UDF). This requires
-     * you to specify the return data type.
-     */
-    def callUDF(f: Function$x[$fTypes], returnType: DataType${if (args.length > 0) ", " + args else ""}): Column = {
-      ScalaUdf(f, returnType, Seq($argsInUdf))
-    }""")
-  }
-  }
-  */
-  /**
-   * Call a Scala function of 0 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag](f: Function0[RT]): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, Seq())
-  }
-
-  /**
-   * Call a Scala function of 1 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag](f: Function1[A1, RT], arg1: Column): 
Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr))
-  }
-
-  /**
-   * Call a Scala function of 2 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag](f: Function2[A1, A2, RT], 
arg1: Column, arg2: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr))
-  }
-
-  /**
-   * Call a Scala function of 3 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag](f: 
Function3[A1, A2, A3, RT], arg1: Column, arg2: Column, arg3: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr))
-  }
-
-  /**
-   * Call a Scala function of 4 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: 
TypeTag](f: Function4[A1, A2, A3, A4, RT], arg1: Column, arg2: Column, arg3: 
Column, arg4: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr))
-  }
-
-  /**
-   * Call a Scala function of 5 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag](f: Function5[A1, A2, A3, A4, A5, RT], arg1: Column, arg2: Column, 
arg3: Column, arg4: Column, arg5: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr))
-  }
-
-  /**
-   * Call a Scala function of 6 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag](f: Function6[A1, A2, A3, A4, A5, A6, RT], arg1: 
Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column, arg6: Column): 
Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr))
-  }
-
-  /**
-   * Call a Scala function of 7 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag](f: Function7[A1, A2, A3, A4, A5, A6, A7, 
RT], arg1: Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column, 
arg6: Column, arg7: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr))
-  }
-
-  /**
-   * Call a Scala function of 8 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag](f: Function8[A1, A2, A3, 
A4, A5, A6, A7, A8, RT], arg1: Column, arg2: Column, arg3: Column, arg4: 
Column, arg5: Column, arg6: Column, arg7: Column, arg8: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr))
-  }
-
-  /**
-   * Call a Scala function of 9 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag](f: 
Function9[A1, A2, A3, A4, A5, A6, A7, A8, A9, RT], arg1: Column, arg2: Column, 
arg3: Column, arg4: Column, arg5: Column, arg6: Column, arg7: Column, arg8: 
Column, arg9: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr))
-  }
-
-  /**
-   * Call a Scala function of 10 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: 
TypeTag](f: Function10[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, RT], arg1: 
Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column, arg6: Column, 
arg7: Column, arg8: Column, arg9: Column, arg10: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr))
-  }
-
-  /**
-   * Call a Scala function of 11 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, 
A11: TypeTag](f: Function11[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, RT], 
arg1: Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column, arg6: 
Column, arg7: Column, arg8: Column, arg9: Column, arg10: Column, arg11: 
Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr))
-  }
-
-  /**
-   * Call a Scala function of 12 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, 
A11: TypeTag, A12: TypeTag](f: Function12[A1, A2, A3, A4, A5, A6, A7, A8, A9, 
A10, A11, A12, RT], arg1: Column, arg2: Column, arg3: Column, arg4: Column, 
arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: Column, arg10: 
Column, arg11: Column, arg12: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, arg12.expr))
-  }
-
-  /**
-   * Call a Scala function of 13 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, 
A11: TypeTag, A12: TypeTag, A13: TypeTag](f: Function13[A1, A2, A3, A4, A5, A6, 
A7, A8, A9, A10, A11, A12, A13, RT], arg1: Column, arg2: Column, arg3: Column, 
arg4: Column, arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: 
Column, arg10: Column, arg11: Column, arg12: Column, arg13: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, arg12.expr, 
arg13.expr))
-  }
-
-  /**
-   * Call a Scala function of 14 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, 
A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag](f: Function14[A1, A2, 
A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, RT], arg1: Column, arg2: 
Column, arg3: Column, arg4: Column, arg5: Column, arg6: Column, arg7: Column, 
arg8: Column, arg9: Column, arg10: Column, arg11: Column, arg12: Column, arg13: 
Column, arg14: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, arg12.expr, 
arg13.expr, arg14.expr))
-  }
-
-  /**
-   * Call a Scala function of 15 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, 
A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag](f: 
Function15[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, 
RT], arg1: Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column, 
arg6: Column, arg7: Column, arg8: Column, arg9: Column, arg10: Column, arg11: 
Column, arg12: Column, arg13: Column, arg14: Column, arg15: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, arg12.expr, 
arg13.expr, arg14.expr, arg15.expr))
-  }
-
-  /**
-   * Call a Scala function of 16 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, 
A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: 
TypeTag](f: Function16[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, 
A14, A15, A16, RT], arg1: Column, arg2: Column, arg3: Column, arg4: Column, 
arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: Column, arg10: 
Column, arg11: Column, arg12: Column, arg13: Column, arg14: Column, arg15: 
Column, arg16: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, arg12.expr, 
arg13.expr, arg14.expr, arg15.expr, arg16.expr))
-  }
-
-  /**
-   * Call a Scala function of 17 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, 
A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: 
TypeTag, A17: TypeTag](f: Function17[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, 
A11, A12, A13, A14, A15, A16, A17, RT], arg1: Column, arg2: Column, arg3: 
Column, arg4: Column, arg5: Column, arg6: Column, arg7: Column, arg8: Column, 
arg9: Column, arg10: Column, arg11: Column, arg12: Column, arg13: Column, 
arg14: Column, arg15: Column, arg16: Column, arg17: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, arg12.expr, 
arg13.expr, arg14.expr, arg15.expr, arg16.expr, arg17.expr))
-  }
-
-  /**
-   * Call a Scala function of 18 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, 
A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: 
TypeTag, A17: TypeTag, A18: TypeTag](f: Function18[A1, A2, A3, A4, A5, A6, A7, 
A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, RT], arg1: Column, arg2: 
Column, arg3: Column, arg4: Column, arg5: Column, arg6: Column, arg7: Column, 
arg8: Column, arg9: Column, arg10: Column, arg11: Column, arg12: Column, arg13: 
Column, arg14: Column, arg15: Column, arg16: Column, arg17: Column, arg18: 
Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, arg12.expr, 
arg13.expr, arg14.expr, arg15.expr, arg16.expr, arg17.expr, arg18.expr))
-  }
-
-  /**
-   * Call a Scala function of 19 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, 
A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: 
TypeTag, A17: TypeTag, A18: TypeTag, A19: TypeTag](f: Function19[A1, A2, A3, 
A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, RT], 
arg1: Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column, arg6: 
Column, arg7: Column, arg8: Column, arg9: Column, arg10: Column, arg11: Column, 
arg12: Column, arg13: Column, arg14: Column, arg15: Column, arg16: Column, 
arg17: Column, arg18: Column, arg19: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, arg12.expr, 
arg13.expr, arg14.expr, arg15.expr, arg16.expr, arg17.expr, arg18.expr, 
arg19.expr))
-  }
-
-  /**
-   * Call a Scala function of 20 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, 
A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: 
TypeTag, A17: TypeTag, A18: TypeTag, A19: TypeTag, A20: TypeTag](f: 
Function20[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, 
A16, A17, A18, A19, A20, RT], arg1: Column, arg2: Column, arg3: Column, arg4: 
Column, arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: Column, 
arg10: Column, arg11: Column, arg12: Column, arg13: Column, arg14: Column, 
arg15: Column, arg16: Column, arg17: Column, arg18: Column, arg19: Column, 
arg20: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, arg12.expr, 
arg13.expr, arg14.expr, arg15.expr, arg16.expr, arg17.expr, arg18.expr, 
arg19.expr, arg20.expr))
-  }
-
-  /**
-   * Call a Scala function of 21 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, 
A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: 
TypeTag, A17: TypeTag, A18: TypeTag, A19: TypeTag, A20: TypeTag, A21: 
TypeTag](f: Function21[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, 
A14, A15, A16, A17, A18, A19, A20, A21, RT], arg1: Column, arg2: Column, arg3: 
Column, arg4: Column, arg5: Column, arg6: Column, arg7: Column, arg8: Column, 
arg9: Column, arg10: Column, arg11: Column, arg12: Column, arg13: Column, 
arg14: Column, arg15: Column, arg16: Column, arg17: Column, arg18: Column, 
arg19: Column, arg20: Column, arg21: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, arg12.expr, 
arg13.expr, arg14.expr, arg15.expr, arg16.expr, arg17.expr, arg18.expr, 
arg19.expr, arg20.expr, arg21.expr))
-  }
-
-  /**
-   * Call a Scala function of 22 arguments as user-defined function (UDF), and 
automatically
-   * infer the data types based on the function's signature.
-   */
-  def callUDF[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, 
A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag, 
A11: TypeTag, A12: TypeTag, A13: TypeTag, A14: TypeTag, A15: TypeTag, A16: 
TypeTag, A17: TypeTag, A18: TypeTag, A19: TypeTag, A20: TypeTag, A21: TypeTag, 
A22: TypeTag](f: Function22[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, 
A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, RT], arg1: Column, arg2: 
Column, arg3: Column, arg4: Column, arg5: Column, arg6: Column, arg7: Column, 
arg8: Column, arg9: Column, arg10: Column, arg11: Column, arg12: Column, arg13: 
Column, arg14: Column, arg15: Column, arg16: Column, arg17: Column, arg18: 
Column, arg19: Column, arg20: Column, arg21: Column, arg22: Column): Column = {
-    ScalaUdf(f, ScalaReflection.schemaFor(typeTag[RT]).dataType, 
Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, arg5.expr, arg6.expr, 
arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, arg12.expr, 
arg13.expr, arg14.expr, arg15.expr, arg16.expr, arg17.expr, arg18.expr, 
arg19.expr, arg20.expr, arg21.expr, arg22.expr))
-  }
-
-  
//////////////////////////////////////////////////////////////////////////////////////////////////
-
-  /**
-   * Call a Scala function of 0 arguments as user-defined function (UDF). This 
requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function0[_], returnType: DataType): Column = {
-    ScalaUdf(f, returnType, Seq())
-  }
-
-  /**
-   * Call a Scala function of 1 arguments as user-defined function (UDF). This 
requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function1[_, _], returnType: DataType, arg1: Column): Column 
= {
-    ScalaUdf(f, returnType, Seq(arg1.expr))
-  }
-
-  /**
-   * Call a Scala function of 2 arguments as user-defined function (UDF). This 
requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function2[_, _, _], returnType: DataType, arg1: Column, arg2: 
Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr))
-  }
-
-  /**
-   * Call a Scala function of 3 arguments as user-defined function (UDF). This 
requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function3[_, _, _, _], returnType: DataType, arg1: Column, 
arg2: Column, arg3: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr))
-  }
-
-  /**
-   * Call a Scala function of 4 arguments as user-defined function (UDF). This 
requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function4[_, _, _, _, _], returnType: DataType, arg1: Column, 
arg2: Column, arg3: Column, arg4: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr))
-  }
-
-  /**
-   * Call a Scala function of 5 arguments as user-defined function (UDF). This 
requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function5[_, _, _, _, _, _], returnType: DataType, arg1: 
Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr))
-  }
-
-  /**
-   * Call a Scala function of 6 arguments as user-defined function (UDF). This 
requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function6[_, _, _, _, _, _, _], returnType: DataType, arg1: 
Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column, arg6: Column): 
Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr))
-  }
-
-  /**
-   * Call a Scala function of 7 arguments as user-defined function (UDF). This 
requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function7[_, _, _, _, _, _, _, _], returnType: DataType, 
arg1: Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column, arg6: 
Column, arg7: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr))
-  }
-
-  /**
-   * Call a Scala function of 8 arguments as user-defined function (UDF). This 
requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function8[_, _, _, _, _, _, _, _, _], returnType: DataType, 
arg1: Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column, arg6: 
Column, arg7: Column, arg8: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr))
-  }
-
-  /**
-   * Call a Scala function of 9 arguments as user-defined function (UDF). This 
requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function9[_, _, _, _, _, _, _, _, _, _], returnType: 
DataType, arg1: Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column, 
arg6: Column, arg7: Column, arg8: Column, arg9: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr))
-  }
-
-  /**
-   * Call a Scala function of 10 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function10[_, _, _, _, _, _, _, _, _, _, _], returnType: 
DataType, arg1: Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column, 
arg6: Column, arg7: Column, arg8: Column, arg9: Column, arg10: Column): Column 
= {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr))
-  }
-
-  /**
-   * Call a Scala function of 11 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function11[_, _, _, _, _, _, _, _, _, _, _, _], returnType: 
DataType, arg1: Column, arg2: Column, arg3: Column, arg4: Column, arg5: Column, 
arg6: Column, arg7: Column, arg8: Column, arg9: Column, arg10: Column, arg11: 
Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr))
-  }
-
-  /**
-   * Call a Scala function of 12 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function12[_, _, _, _, _, _, _, _, _, _, _, _, _], 
returnType: DataType, arg1: Column, arg2: Column, arg3: Column, arg4: Column, 
arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: Column, arg10: 
Column, arg11: Column, arg12: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, 
arg12.expr))
-  }
-
-  /**
-   * Call a Scala function of 13 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function13[_, _, _, _, _, _, _, _, _, _, _, _, _, _], 
returnType: DataType, arg1: Column, arg2: Column, arg3: Column, arg4: Column, 
arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: Column, arg10: 
Column, arg11: Column, arg12: Column, arg13: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, 
arg12.expr, arg13.expr))
-  }
-
-  /**
-   * Call a Scala function of 14 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function14[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _], 
returnType: DataType, arg1: Column, arg2: Column, arg3: Column, arg4: Column, 
arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: Column, arg10: 
Column, arg11: Column, arg12: Column, arg13: Column, arg14: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, 
arg12.expr, arg13.expr, arg14.expr))
-  }
-
-  /**
-   * Call a Scala function of 15 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function15[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _], 
returnType: DataType, arg1: Column, arg2: Column, arg3: Column, arg4: Column, 
arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: Column, arg10: 
Column, arg11: Column, arg12: Column, arg13: Column, arg14: Column, arg15: 
Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, 
arg12.expr, arg13.expr, arg14.expr, arg15.expr))
-  }
-
-  /**
-   * Call a Scala function of 16 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function16[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, 
_], returnType: DataType, arg1: Column, arg2: Column, arg3: Column, arg4: 
Column, arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: Column, 
arg10: Column, arg11: Column, arg12: Column, arg13: Column, arg14: Column, 
arg15: Column, arg16: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, 
arg12.expr, arg13.expr, arg14.expr, arg15.expr, arg16.expr))
-  }
-
-  /**
-   * Call a Scala function of 17 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function17[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, 
_], returnType: DataType, arg1: Column, arg2: Column, arg3: Column, arg4: 
Column, arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: Column, 
arg10: Column, arg11: Column, arg12: Column, arg13: Column, arg14: Column, 
arg15: Column, arg16: Column, arg17: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, 
arg12.expr, arg13.expr, arg14.expr, arg15.expr, arg16.expr, arg17.expr))
-  }
-
-  /**
-   * Call a Scala function of 18 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function18[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, 
_, _], returnType: DataType, arg1: Column, arg2: Column, arg3: Column, arg4: 
Column, arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: Column, 
arg10: Column, arg11: Column, arg12: Column, arg13: Column, arg14: Column, 
arg15: Column, arg16: Column, arg17: Column, arg18: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, 
arg12.expr, arg13.expr, arg14.expr, arg15.expr, arg16.expr, arg17.expr, 
arg18.expr))
-  }
-
-  /**
-   * Call a Scala function of 19 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function19[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, 
_, _, _], returnType: DataType, arg1: Column, arg2: Column, arg3: Column, arg4: 
Column, arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: Column, 
arg10: Column, arg11: Column, arg12: Column, arg13: Column, arg14: Column, 
arg15: Column, arg16: Column, arg17: Column, arg18: Column, arg19: Column): 
Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, 
arg12.expr, arg13.expr, arg14.expr, arg15.expr, arg16.expr, arg17.expr, 
arg18.expr, arg19.expr))
-  }
-
-  /**
-   * Call a Scala function of 20 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function20[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, 
_, _, _, _], returnType: DataType, arg1: Column, arg2: Column, arg3: Column, 
arg4: Column, arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: 
Column, arg10: Column, arg11: Column, arg12: Column, arg13: Column, arg14: 
Column, arg15: Column, arg16: Column, arg17: Column, arg18: Column, arg19: 
Column, arg20: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, 
arg12.expr, arg13.expr, arg14.expr, arg15.expr, arg16.expr, arg17.expr, 
arg18.expr, arg19.expr, arg20.expr))
-  }
-
-  /**
-   * Call a Scala function of 21 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function21[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, 
_, _, _, _, _], returnType: DataType, arg1: Column, arg2: Column, arg3: Column, 
arg4: Column, arg5: Column, arg6: Column, arg7: Column, arg8: Column, arg9: 
Column, arg10: Column, arg11: Column, arg12: Column, arg13: Column, arg14: 
Column, arg15: Column, arg16: Column, arg17: Column, arg18: Column, arg19: 
Column, arg20: Column, arg21: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, 
arg12.expr, arg13.expr, arg14.expr, arg15.expr, arg16.expr, arg17.expr, 
arg18.expr, arg19.expr, arg20.expr, arg21.expr))
-  }
-
-  /**
-   * Call a Scala function of 22 arguments as user-defined function (UDF). 
This requires
-   * you to specify the return data type.
-   */
-  def callUDF(f: Function22[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, 
_, _, _, _, _, _], returnType: DataType, arg1: Column, arg2: Column, arg3: 
Column, arg4: Column, arg5: Column, arg6: Column, arg7: Column, arg8: Column, 
arg9: Column, arg10: Column, arg11: Column, arg12: Column, arg13: Column, 
arg14: Column, arg15: Column, arg16: Column, arg17: Column, arg18: Column, 
arg19: Column, arg20: Column, arg21: Column, arg22: Column): Column = {
-    ScalaUdf(f, returnType, Seq(arg1.expr, arg2.expr, arg3.expr, arg4.expr, 
arg5.expr, arg6.expr, arg7.expr, arg8.expr, arg9.expr, arg10.expr, arg11.expr, 
arg12.expr, arg13.expr, arg14.expr, arg15.expr, arg16.expr, arg17.expr, 
arg18.expr, arg19.expr, arg20.expr, arg21.expr, arg22.expr))
-  }
-
-  // scalastyle:on
-}
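
The package object deleted above is not gone for good: the import rewrites in the
test suites below show it re-homed as org.apache.spark.sql.api.scala.dsl. A minimal
sketch of how that DSL is used, assuming the relocated package keeps the definitions
shown in the deleted file; people and its name/age/city columns are hypothetical here:

    import org.apache.spark.sql.api.scala.dsl._
    import org.apache.spark.sql.types.IntegerType

    // Symbols and the $"..." interpolator both build Column references.
    val adults = people.where('age >= 21).select($"name", $"age")

    // Aggregate helpers are defined directly in the package object.
    val stats = people.agg(count($"name"), avg('age), countDistinct('city))

    // callUDF can infer the return type from the function's signature...
    val upperName = callUDF((s: String) => s.toUpperCase, $"name")

    // ...or take an explicit return DataType, as in the second family of overloads.
    val nameLen = callUDF((s: String) => s.length, IntegerType, $"name")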

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index e1e9692..cccc547 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 
 import org.apache.spark.sql.TestData._
 import org.apache.spark.sql.columnar._
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.test.TestSQLContext._
 import org.apache.spark.storage.{StorageLevel, RDDBlockId}
 

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
index 701950f..8202931 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql
 
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.test.TestSQLContext
 import org.apache.spark.sql.types.{BooleanType, IntegerType, StructField, StructType}
 
@@ -244,7 +244,7 @@ class ColumnExpressionSuite extends QueryTest {
     )
 
     checkAnswer(
-      testData.select(sqrt(Literal(null))),
+      testData.select(sqrt(lit(null))),
       (1 to 100).map(_ => Row(null))
     )
   }
@@ -261,7 +261,7 @@ class ColumnExpressionSuite extends QueryTest {
     )
 
     checkAnswer(
-      testData.select(abs(Literal(null))),
+      testData.select(abs(lit(null))),
       (1 to 100).map(_ => Row(null))
     )
   }
@@ -278,7 +278,7 @@ class ColumnExpressionSuite extends QueryTest {
     )
 
     checkAnswer(
-      testData.select(upper(Literal(null))),
+      testData.select(upper(lit(null))),
       (1 to 100).map(n => Row(null))
     )
   }
@@ -295,7 +295,7 @@ class ColumnExpressionSuite extends QueryTest {
     )
 
     checkAnswer(
-      testData.select(lower(Literal(null))),
+      testData.select(lower(lit(null))),
       (1 to 100).map(n => Row(null))
     )
   }
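
A related spelling change runs through these tests: the Literal(...) constructor is
replaced by the DSL's lit(...) helper whenever a plain Scala value (including null)
needs to become a Column. A small sketch, with df standing in for any DataFrame:

    import org.apache.spark.sql.api.scala.dsl._

    // lit(...) wraps a value as a literal Column expression.
    df.select(upper(lit("spark")), abs(lit(-1)), sqrt(lit(null)))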

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index ec3770b..b1fb1bd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql
 
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.types._
 
 /* Implicits */
@@ -57,13 +57,13 @@ class DataFrameSuite extends QueryTest {
 
   test("convert $\"attribute name\" into unresolved attribute") {
     checkAnswer(
-      testData.where($"key" === Literal(1)).select($"value"),
+      testData.where($"key" === lit(1)).select($"value"),
       Row("1"))
   }
 
   test("convert Scala Symbol 'attrname into unresolved attribute") {
     checkAnswer(
-      testData.where('key === Literal(1)).select('value),
+      testData.where('key === lit(1)).select('value),
       Row("1"))
   }
 
@@ -75,13 +75,13 @@ class DataFrameSuite extends QueryTest {
 
   test("simple select") {
     checkAnswer(
-      testData.where('key === Literal(1)).select('value),
+      testData.where('key === lit(1)).select('value),
       Row("1"))
   }
 
   test("select with functions") {
     checkAnswer(
-      testData.select(sum('value), avg('value), count(Literal(1))),
+      testData.select(sum('value), avg('value), count(lit(1))),
       Row(5050.0, 50.5, 100))
 
     checkAnswer(
@@ -215,7 +215,7 @@ class DataFrameSuite extends QueryTest {
     )
 
     checkAnswer(
-      testData3.agg(count('a), count('b), count(Literal(1)), countDistinct('a), countDistinct('b)),
+      testData3.agg(count('a), count('b), count(lit(1)), countDistinct('a), countDistinct('b)),
       Row(2, 1, 2, 2, 1)
     )
 

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
index 561db59..bb95248 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql
 import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.sql.TestData._
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
 import org.apache.spark.sql.execution.joins._
 import org.apache.spark.sql.test.TestSQLContext._
@@ -136,8 +136,8 @@ class JoinSuite extends QueryTest with BeforeAndAfterEach {
   }
 
   test("inner join, where, multiple matches") {
-    val x = testData2.where($"a" === Literal(1)).as("x")
-    val y = testData2.where($"a" === Literal(1)).as("y")
+    val x = testData2.where($"a" === 1).as("x")
+    val y = testData2.where($"a" === 1).as("y")
     checkAnswer(
       x.join(y).where($"x.a" === $"y.a"),
       Row(1,1,1,1) ::
@@ -148,8 +148,8 @@ class JoinSuite extends QueryTest with BeforeAndAfterEach {
   }
 
   test("inner join, no matches") {
-    val x = testData2.where($"a" === Literal(1)).as("x")
-    val y = testData2.where($"a" === Literal(2)).as("y")
+    val x = testData2.where($"a" === 1).as("x")
+    val y = testData2.where($"a" === 2).as("y")
     checkAnswer(
       x.join(y).where($"x.a" === $"y.a"),
       Nil)
@@ -185,7 +185,7 @@ class JoinSuite extends QueryTest with BeforeAndAfterEach {
         Row(6, "F", null, null) :: Nil)
 
     checkAnswer(
-      upperCaseData.join(lowerCaseData, $"n" === $"N" && $"n" > Literal(1), "left"),
+      upperCaseData.join(lowerCaseData, $"n" === $"N" && $"n" > 1, "left"),
       Row(1, "A", null, null) ::
         Row(2, "B", 2, "b") ::
         Row(3, "C", 3, "c") ::
@@ -194,7 +194,7 @@ class JoinSuite extends QueryTest with BeforeAndAfterEach {
         Row(6, "F", null, null) :: Nil)
 
     checkAnswer(
-      upperCaseData.join(lowerCaseData, $"n" === $"N" && $"N" > Literal(1), "left"),
+      upperCaseData.join(lowerCaseData, $"n" === $"N" && $"N" > 1, "left"),
       Row(1, "A", null, null) ::
         Row(2, "B", 2, "b") ::
         Row(3, "C", 3, "c") ::
@@ -247,7 +247,7 @@ class JoinSuite extends QueryTest with BeforeAndAfterEach {
         Row(null, null, 5, "E") ::
         Row(null, null, 6, "F") :: Nil)
     checkAnswer(
-      lowerCaseData.join(upperCaseData, $"n" === $"N" && $"n" > Literal(1), "right"),
+      lowerCaseData.join(upperCaseData, $"n" === $"N" && $"n" > 1, "right"),
       Row(null, null, 1, "A") ::
         Row(2, "b", 2, "B") ::
         Row(3, "c", 3, "C") ::
@@ -255,7 +255,7 @@ class JoinSuite extends QueryTest with BeforeAndAfterEach {
         Row(null, null, 5, "E") ::
         Row(null, null, 6, "F") :: Nil)
     checkAnswer(
-      lowerCaseData.join(upperCaseData, $"n" === $"N" && $"N" > Literal(1), "right"),
+      lowerCaseData.join(upperCaseData, $"n" === $"N" && $"N" > 1, "right"),
       Row(null, null, 1, "A") ::
         Row(2, "b", 2, "B") ::
         Row(3, "c", 3, "C") ::
@@ -298,8 +298,8 @@ class JoinSuite extends QueryTest with BeforeAndAfterEach {
   }
 
   test("full outer join") {
-    upperCaseData.where('N <= Literal(4)).registerTempTable("left")
-    upperCaseData.where('N >= Literal(3)).registerTempTable("right")
+    upperCaseData.where('N <= 4).registerTempTable("left")
+    upperCaseData.where('N >= 3).registerTempTable("right")
 
     val left = UnresolvedRelation(Seq("left"), None)
     val right = UnresolvedRelation(Seq("right"), None)
@@ -314,7 +314,7 @@ class JoinSuite extends QueryTest with BeforeAndAfterEach {
         Row(null, null, 6, "F") :: Nil)
 
     checkAnswer(
-      left.join(right, ($"left.N" === $"right.N") && ($"left.N" !== Literal(3)), "full"),
+      left.join(right, ($"left.N" === $"right.N") && ($"left.N" !== 3), "full"),
       Row(1, "A", null, null) ::
         Row(2, "B", null, null) ::
         Row(3, "C", null, null) ::
@@ -324,7 +324,7 @@ class JoinSuite extends QueryTest with BeforeAndAfterEach {
         Row(null, null, 6, "F") :: Nil)
 
     checkAnswer(
-      left.join(right, ($"left.N" === $"right.N") && ($"right.N" !== Literal(3)), "full"),
+      left.join(right, ($"left.N" === $"right.N") && ($"right.N" !== 3), "full"),
       Row(1, "A", null, null) ::
         Row(2, "B", null, null) ::
         Row(3, "C", null, null) ::

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index c00ae0a..9bb6403 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -21,7 +21,7 @@ import java.util.TimeZone
 
 import org.scalatest.BeforeAndAfterAll
 
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.catalyst.errors.TreeNodeException
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.types._

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
index 82dd669..eae6acf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql
 import java.sql.Timestamp
 
 import org.apache.spark.sql.catalyst.plans.logical
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.test._
 
 /* Implicits */

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
index 5abd7b9..b122d7d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql
 
-import org.apache.spark.sql.dsl.StringToColumn
+import org.apache.spark.sql.api.scala.dsl.StringToColumn
 import org.apache.spark.sql.test._
 
 /* Implicits */
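
UDFSuite pulls in only the StringToColumn implicit class rather than the whole DSL;
per the deleted package object above, that single implicit is what turns $"..." into
a ColumnName. A minimal sketch, with df again hypothetical:

    import org.apache.spark.sql.api.scala.dsl.StringToColumn

    // Only the interpolator is in scope here.
    df.select($"id")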

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
index 62b2e89..59e6f00 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql
 import scala.beans.{BeanInfo, BeanProperty}
 
 import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.test.TestSQLContext._
 import org.apache.spark.sql.types._
 

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
index 6f051df..2698a59 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.columnar
 
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.TestData._
 import org.apache.spark.sql.catalyst.expressions.Row
 import org.apache.spark.sql.test.TestSQLContext._

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
index be5e63c..1f701e2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.execution
 import org.scalatest.FunSuite
 
 import org.apache.spark.sql.{SQLConf, execution}
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.TestData._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans._

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
index 5a75326..634792c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
@@ -21,15 +21,16 @@ import java.sql.{Date, Timestamp}
 
 import org.apache.spark.sql.TestData._
 import org.apache.spark.sql.catalyst.util._
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.json.JsonRDD.{compatibleType, enforceCorrectType}
 import org.apache.spark.sql.test.TestSQLContext
 import org.apache.spark.sql.test.TestSQLContext._
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.{Literal, QueryTest, Row, SQLConf}
+import org.apache.spark.sql.{QueryTest, Row, SQLConf}
 
 class JsonSuite extends QueryTest {
   import org.apache.spark.sql.json.TestJsonData._
+
   TestJsonData
 
   test("Type promotion") {
@@ -464,8 +465,8 @@ class JsonSuite extends QueryTest {
     // in the Project.
     checkAnswer(
       jsonDF.
-        where('num_str > Literal(BigDecimal("92233720368547758060"))).
-        select(('num_str + Literal(1.2)).as("num")),
+        where('num_str > BigDecimal("92233720368547758060")).
+        select(('num_str + 1.2).as("num")),
       Row(new java.math.BigDecimal("92233720368547758061.2"))
     )
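
The same lifting covers BigDecimal and Double values in column arithmetic, which is
why the rewritten query above needs no Literal(...) at all. If an explicit form is
preferred, lit(...) should accept the same values, assuming it mirrors the old
Literal factory:

    import org.apache.spark.sql.api.scala.dsl._

    // Hypothetical explicit spelling of the query above.
    jsonDF
      .where('num_str > lit(BigDecimal("92233720368547758060")))
      .select(('num_str + lit(1.2)).as("num"))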
 

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala
index f03b3a3..0e91834 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala
@@ -33,7 +33,7 @@ import parquet.schema.{MessageType, MessageTypeParser}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileSystem, Path}
 import org.apache.spark.sql.{DataFrame, QueryTest, SQLConf}
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.catalyst.ScalaReflection
 import org.apache.spark.sql.catalyst.expressions.Row
 import org.apache.spark.sql.test.TestSQLContext

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/hive/src/main/scala/org/apache/spark/sql/hive/package-info.java
----------------------------------------------------------------------
diff --git 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/package-info.java 
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/package-info.java
index 8b29fa7..4b23fbf 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/package-info.java
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/package-info.java
@@ -15,4 +15,4 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.hive;
\ No newline at end of file
+package org.apache.spark.sql.hive;

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 0c8a113..a485158 100644
--- 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars
 import org.apache.spark.{SparkFiles, SparkException}
 import org.apache.spark.sql.{DataFrame, Row}
 import org.apache.spark.sql.catalyst.plans.logical.Project
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.hive._
 import org.apache.spark.sql.hive.test.TestHive
 import org.apache.spark.sql.hive.test.TestHive._

http://git-wip-us.apache.org/repos/asf/spark/blob/5b9760de/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
index a081227..efea3d8 100644
--- 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
+++ 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.hive.execution
 
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.dsl._
+import org.apache.spark.sql.api.scala.dsl._
 import org.apache.spark.sql.hive.test.TestHive
 import org.apache.spark.sql.hive.test.TestHive._
 

