Repository: spark Updated Branches: refs/heads/branch-2.1 c4cbdc864 -> 1759cf69a
[SPARK-18058][SQL][TRIVIAL] Use dataType.sameType(...) instead of equality on asNullable datatypes ## What changes were proposed in this pull request? This is absolutely minor. PR https://github.com/apache/spark/pull/15595 uses `dt1.asNullable == dt2.asNullable` expressions in a few places. It is however more efficient to call `dt1.sameType(dt2)`. I have replaced every instance of the first pattern with the second pattern (3/5 were introduced by #15595). ## How was this patch tested? Existing tests. Author: Herman van Hovell <hvanhov...@databricks.com> Closes #16041 from hvanhovell/SPARK-18058. (cherry picked from commit d449988b8819775fcfd27da53bb5143a7aab01f7) Signed-off-by: Reynold Xin <r...@databricks.com> Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/1759cf69 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/1759cf69 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/1759cf69 Branch: refs/heads/branch-2.1 Commit: 1759cf69aa1a7059a5fe78d012a54bc0ba02677c Parents: c4cbdc8 Author: Herman van Hovell <hvanhov...@databricks.com> Authored: Mon Nov 28 21:43:33 2016 -0800 Committer: Reynold Xin <r...@databricks.com> Committed: Mon Nov 28 21:43:38 2016 -0800 ---------------------------------------------------------------------- .../org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala | 2 +- .../sql/catalyst/expressions/conditionalExpressions.scala | 2 +- .../sql/catalyst/plans/logical/basicLogicalOperators.scala | 6 +++--- .../spark/sql/execution/datasources/DataSourceStrategy.scala | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/spark/blob/1759cf69/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala ---------------------------------------------------------------------- diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala index 26d2638..db41752 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala @@ -306,7 +306,7 @@ trait CheckAnalysis extends PredicateHelper { // Check if the data types match. dataTypes(child).zip(ref).zipWithIndex.foreach { case ((dt1, dt2), ci) => // SPARK-18058: we shall not care about the nullability of columns - if (dt1.asNullable != dt2.asNullable) { + if (!dt1.sameType(dt2)) { failAnalysis( s""" |${operator.nodeName} can only be performed on tables with the compatible http://git-wip-us.apache.org/repos/asf/spark/blob/1759cf69/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala index a7d9e2d..afc190e 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala @@ -41,7 +41,7 @@ case class If(predicate: Expression, trueValue: Expression, falseValue: Expressi if (predicate.dataType != BooleanType) { TypeCheckResult.TypeCheckFailure( s"type of predicate expression in If should be boolean, not ${predicate.dataType}") - } else if (trueValue.dataType.asNullable != falseValue.dataType.asNullable) { + } else if (!trueValue.dataType.sameType(falseValue.dataType)) { TypeCheckResult.TypeCheckFailure(s"differing types in '$sql' " + s"(${trueValue.dataType.simpleString} and 
${falseValue.dataType.simpleString}).") } else { http://git-wip-us.apache.org/repos/asf/spark/blob/1759cf69/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala index dd6c8fd..da42df3 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala @@ -135,7 +135,7 @@ abstract class SetOperation(left: LogicalPlan, right: LogicalPlan) extends Binar childrenResolved && left.output.length == right.output.length && left.output.zip(right.output).forall { case (l, r) => - l.dataType.asNullable == r.dataType.asNullable + l.dataType.sameType(r.dataType) } && duplicateResolved } @@ -212,8 +212,8 @@ case class Union(children: Seq[LogicalPlan]) extends LogicalPlan { child.output.length == children.head.output.length && // compare the data types with the first child child.output.zip(children.head.output).forall { - case (l, r) => l.dataType.asNullable == r.dataType.asNullable } - ) + case (l, r) => l.dataType.sameType(r.dataType) + }) children.length > 1 && childrenResolved && allChildrenCompatible } http://git-wip-us.apache.org/repos/asf/spark/blob/1759cf69/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala ---------------------------------------------------------------------- diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala index 4f19a2d..f3d92bf 100644 --- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala @@ -163,7 +163,7 @@ case class DataSourceAnalysis(conf: CatalystConf) extends Rule[LogicalPlan] { case i @ logical.InsertIntoTable( l @ LogicalRelation(t: HadoopFsRelation, _, table), part, query, overwrite, false) - if query.resolved && t.schema.asNullable == query.schema.asNullable => + if query.resolved && t.schema.sameType(query.schema) => // Sanity checks if (t.location.rootPaths.size != 1) { --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org