Repository: spark
Updated Branches:
  refs/heads/master 03d46aafe -> 2eedc00b0


[SPARK-16828][SQL] remove MaxOf and MinOf

## What changes were proposed in this pull request?

These two expressions are no longer needed now that we have `Greatest` and `Least`. This PR removes them and the related tests.
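
For context, a minimal, illustrative sketch (not part of this patch) of how the same semantics are expressed with the public `greatest`/`least` functions; it assumes an existing `SparkSession` named `spark`:

```scala
import org.apache.spark.sql.functions.{greatest, least, lit}

// greatest/least cover what MaxOf/MinOf did, including skipping null inputs.
val df = spark.range(1).select(
  greatest(lit(1), lit(2)).as("max_val"),                      // 2
  least(lit(1), lit(2)).as("min_val"),                         // 1
  greatest(lit(null).cast("int"), lit(2)).as("max_skip_null")  // 2 (null is skipped)
)
df.show()
```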

## How was this patch tested?

N/A

Author: Wenchen Fan <wenc...@databricks.com>

Closes #14434 from cloud-fan/minor1.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2eedc00b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2eedc00b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2eedc00b

Branch: refs/heads/master
Commit: 2eedc00b04ef8ca771ff64c4f834c25f835f5f44
Parents: 03d46aa
Author: Wenchen Fan <wenc...@databricks.com>
Authored: Mon Aug 1 17:54:41 2016 -0700
Committer: Yin Huai <yh...@databricks.com>
Committed: Mon Aug 1 17:54:41 2016 -0700

----------------------------------------------------------------------
 .../sql/catalyst/expressions/arithmetic.scala   | 110 -------------------
 .../sql/catalyst/optimizer/Optimizer.scala      |   4 -
 .../analysis/ExpressionTypeCheckingSuite.scala  |   7 --
 .../expressions/ArithmeticExpressionSuite.scala |  54 ---------
 4 files changed, 175 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/2eedc00b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 7ff8795..77d40a5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -361,116 +361,6 @@ case class Remainder(left: Expression, right: Expression)
   }
 }
 
-case class MaxOf(left: Expression, right: Expression)
-  extends BinaryArithmetic with NonSQLExpression {
-
-  // TODO: Remove MaxOf and MinOf, and replace its usage with Greatest and Least.
-
-  override def inputType: AbstractDataType = TypeCollection.Ordered
-
-  override def nullable: Boolean = left.nullable && right.nullable
-
-  private lazy val ordering = TypeUtils.getInterpretedOrdering(dataType)
-
-  override def eval(input: InternalRow): Any = {
-    val input1 = left.eval(input)
-    val input2 = right.eval(input)
-    if (input1 == null) {
-      input2
-    } else if (input2 == null) {
-      input1
-    } else {
-      if (ordering.compare(input1, input2) < 0) {
-        input2
-      } else {
-        input1
-      }
-    }
-  }
-
-  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
-    val eval1 = left.genCode(ctx)
-    val eval2 = right.genCode(ctx)
-    val compCode = ctx.genComp(dataType, eval1.value, eval2.value)
-
-    ev.copy(code = eval1.code + eval2.code + s"""
-      boolean ${ev.isNull} = false;
-      ${ctx.javaType(left.dataType)} ${ev.value} =
-        ${ctx.defaultValue(left.dataType)};
-
-      if (${eval1.isNull}) {
-        ${ev.isNull} = ${eval2.isNull};
-        ${ev.value} = ${eval2.value};
-      } else if (${eval2.isNull}) {
-        ${ev.isNull} = ${eval1.isNull};
-        ${ev.value} = ${eval1.value};
-      } else {
-        if ($compCode > 0) {
-          ${ev.value} = ${eval1.value};
-        } else {
-          ${ev.value} = ${eval2.value};
-        }
-      }""")
-  }
-
-  override def symbol: String = "max"
-}
-
-case class MinOf(left: Expression, right: Expression)
-  extends BinaryArithmetic with NonSQLExpression {
-
-  // TODO: Remove MaxOf and MinOf, and replace its usage with Greatest and Least.
-
-  override def inputType: AbstractDataType = TypeCollection.Ordered
-
-  override def nullable: Boolean = left.nullable && right.nullable
-
-  private lazy val ordering = TypeUtils.getInterpretedOrdering(dataType)
-
-  override def eval(input: InternalRow): Any = {
-    val input1 = left.eval(input)
-    val input2 = right.eval(input)
-    if (input1 == null) {
-      input2
-    } else if (input2 == null) {
-      input1
-    } else {
-      if (ordering.compare(input1, input2) < 0) {
-        input1
-      } else {
-        input2
-      }
-    }
-  }
-
-  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
-    val eval1 = left.genCode(ctx)
-    val eval2 = right.genCode(ctx)
-    val compCode = ctx.genComp(dataType, eval1.value, eval2.value)
-
-    ev.copy(code = eval1.code + eval2.code + s"""
-      boolean ${ev.isNull} = false;
-      ${ctx.javaType(left.dataType)} ${ev.value} =
-        ${ctx.defaultValue(left.dataType)};
-
-      if (${eval1.isNull}) {
-        ${ev.isNull} = ${eval2.isNull};
-        ${ev.value} = ${eval2.value};
-      } else if (${eval2.isNull}) {
-        ${ev.isNull} = ${eval1.isNull};
-        ${ev.value} = ${eval1.value};
-      } else {
-        if ($compCode < 0) {
-          ${ev.value} = ${eval1.value};
-        } else {
-          ${ev.value} = ${eval2.value};
-        }
-      }""")
-  }
-
-  override def symbol: String = "min"
-}
-
 @ExpressionDescription(
   usage = "_FUNC_(a, b) - Returns the positive modulo",
   extended = "> SELECT _FUNC_(10,3);\n 1")
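
(Aside, not part of this diff: the existing `Greatest`/`Least` expressions already reproduce the null-skipping evaluation removed above. A minimal interpreted-eval sketch, assuming the catalyst classpath is available:)

```scala
import org.apache.spark.sql.catalyst.expressions.{EmptyRow, Greatest, Least, Literal}
import org.apache.spark.sql.types.IntegerType

val nullInt = Literal.create(null, IntegerType)

assert(Greatest(Seq(Literal(1), Literal(2))).eval(EmptyRow) == 2) // like MaxOf(1, 2)
assert(Greatest(Seq(nullInt, Literal(2))).eval(EmptyRow) == 2)    // null input is skipped
assert(Least(Seq(Literal(1), nullInt)).eval(EmptyRow) == 1)       // like MinOf(1, null)
```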

http://git-wip-us.apache.org/repos/asf/spark/blob/2eedc00b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index fe328fd..7513000 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -662,10 +662,6 @@ object NullPropagation extends Rule[LogicalPlan] {
      case e @ Substring(_, Literal(null, _), _) => Literal.create(null, e.dataType)
      case e @ Substring(_, _, Literal(null, _)) => Literal.create(null, e.dataType)
 
-      // MaxOf and MinOf can't do null propagation
-      case e: MaxOf => e
-      case e: MinOf => e
-
       // Put exceptional cases above if any
      case e @ BinaryArithmetic(Literal(null, _), _) => Literal.create(null, e.dataType)
      case e @ BinaryArithmetic(_, Literal(null, _)) => Literal.create(null, e.dataType)
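
(Aside, illustrative only: the removed special case existed because `MaxOf`/`MinOf` extended `BinaryArithmetic` yet must not fold a null argument into a null result. `Greatest`/`Least` are not `BinaryArithmetic`, so no exception is needed. A quick sketch assuming an existing `SparkSession` named `spark`:)

```scala
// Ordinary binary arithmetic propagates NULL, so constant-folding it to NULL is safe:
spark.sql("SELECT 1 + CAST(NULL AS INT)").show()           // NULL

// greatest/least skip NULL inputs, so they must NOT be folded to NULL:
spark.sql("SELECT greatest(CAST(NULL AS INT), 2)").show()  // 2
spark.sql("SELECT least(CAST(NULL AS INT), 2)").show()     // 2
```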

http://git-wip-us.apache.org/repos/asf/spark/blob/2eedc00b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
index 76e42d9..35f7569 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
@@ -78,8 +78,6 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite {
     assertErrorForDifferingTypes(BitwiseAnd('intField, 'booleanField))
     assertErrorForDifferingTypes(BitwiseOr('intField, 'booleanField))
     assertErrorForDifferingTypes(BitwiseXor('intField, 'booleanField))
-    assertErrorForDifferingTypes(MaxOf('intField, 'booleanField))
-    assertErrorForDifferingTypes(MinOf('intField, 'booleanField))
 
     assertError(Add('booleanField, 'booleanField), "requires (numeric or calendarinterval) type")
     assertError(Subtract('booleanField, 'booleanField),
@@ -91,11 +89,6 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite {
     assertError(BitwiseAnd('booleanField, 'booleanField), "requires integral type")
     assertError(BitwiseOr('booleanField, 'booleanField), "requires integral type")
     assertError(BitwiseXor('booleanField, 'booleanField), "requires integral type")
-
-    assertError(MaxOf('mapField, 'mapField),
-      s"requires ${TypeCollection.Ordered.simpleString} type")
-    assertError(MinOf('mapField, 'mapField),
-      s"requires ${TypeCollection.Ordered.simpleString} type")
   }
 
   test("check types for predicates") {

http://git-wip-us.apache.org/repos/asf/spark/blob/2eedc00b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
index 2e37887..321d820 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
@@ -194,56 +194,6 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
     }
   }
 
-  test("MaxOf basic") {
-    testNumericDataTypes { convert =>
-      val small = Literal(convert(1))
-      val large = Literal(convert(2))
-      checkEvaluation(MaxOf(small, large), convert(2))
-      checkEvaluation(MaxOf(large, small), convert(2))
-      checkEvaluation(MaxOf(Literal.create(null, small.dataType), large), convert(2))
-      checkEvaluation(MaxOf(large, Literal.create(null, small.dataType)), convert(2))
-    }
-    checkEvaluation(MaxOf(positiveShortLit, negativeShortLit), (positiveShort).toShort)
-    checkEvaluation(MaxOf(positiveIntLit, negativeIntLit), positiveInt)
-    checkEvaluation(MaxOf(positiveLongLit, negativeLongLit), positiveLong)
-
-    DataTypeTestUtils.ordered.foreach { tpe =>
-      checkConsistencyBetweenInterpretedAndCodegen(MaxOf, tpe, tpe)
-    }
-  }
-
-  test("MaxOf for atomic type") {
-    checkEvaluation(MaxOf(true, false), true)
-    checkEvaluation(MaxOf("abc", "bcd"), "bcd")
-    checkEvaluation(MaxOf(Array(1.toByte, 2.toByte), Array(1.toByte, 3.toByte)),
-      Array(1.toByte, 3.toByte))
-  }
-
-  test("MinOf basic") {
-    testNumericDataTypes { convert =>
-      val small = Literal(convert(1))
-      val large = Literal(convert(2))
-      checkEvaluation(MinOf(small, large), convert(1))
-      checkEvaluation(MinOf(large, small), convert(1))
-      checkEvaluation(MinOf(Literal.create(null, small.dataType), large), convert(2))
-      checkEvaluation(MinOf(small, Literal.create(null, small.dataType)), convert(1))
-    }
-    checkEvaluation(MinOf(positiveShortLit, negativeShortLit), (negativeShort).toShort)
-    checkEvaluation(MinOf(positiveIntLit, negativeIntLit), negativeInt)
-    checkEvaluation(MinOf(positiveLongLit, negativeLongLit), negativeLong)
-
-    DataTypeTestUtils.ordered.foreach { tpe =>
-      checkConsistencyBetweenInterpretedAndCodegen(MinOf, tpe, tpe)
-    }
-  }
-
-  test("MinOf for atomic type") {
-    checkEvaluation(MinOf(true, false), false)
-    checkEvaluation(MinOf("abc", "bcd"), "abc")
-    checkEvaluation(MinOf(Array(1.toByte, 2.toByte), Array(1.toByte, 3.toByte)),
-      Array(1.toByte, 2.toByte))
-  }
-
   test("pmod") {
     testNumericDataTypes { convert =>
       val left = Literal(convert(7))
@@ -261,8 +211,4 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
     checkEvaluation(Pmod(positiveInt, negativeInt), positiveInt)
     checkEvaluation(Pmod(positiveLong, negativeLong), positiveLong)
   }
-
-  DataTypeTestUtils.numericTypeWithoutDecimal.foreach { tpe =>
-    checkConsistencyBetweenInterpretedAndCodegen(MinOf, tpe, tpe)
-  }
 }

