Repository: spark
Updated Branches:
  refs/heads/master e28a8c589 -> 4a2c375be


[SPARK-17084][SQL] Rename ParserUtils.assert to validate

## What changes were proposed in this pull request?
This PR renames `ParserUtils.assert` to `ParserUtils.validate`. The method is used 
to check requirements on the parsed input, throwing a `ParseException` when they 
are violated; it does not check whether the program itself is in an invalid state, 
which is what `assert` implies.
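
To make the distinction concrete, here is a minimal sketch of the intended usage; the helper `requireNonEmptyUnits` and its parameters are illustrative, not part of this patch:

```scala
import org.antlr.v4.runtime.ParserRuleContext
import org.apache.spark.sql.catalyst.parser.ParserUtils.validate

// Scala's Predef.assert throws an AssertionError, which signals a bug inside
// Spark itself. ParserUtils.validate instead throws a ParseException anchored
// to the offending SQL fragment, the right signal for a requirement that the
// user's input failed to meet.
def requireNonEmptyUnits(units: Seq[String], ctx: ParserRuleContext): Unit = {
  validate(units.nonEmpty, "at least one time unit should be given", ctx)
}
```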

## How was this patch tested?
This is a simple rename, so successful compilation suffices as a test.

Author: Herman van Hovell <hvanhov...@databricks.com>

Closes #14665 from hvanhovell/SPARK-17084.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/4a2c375b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/4a2c375b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/4a2c375b

Branch: refs/heads/master
Commit: 4a2c375be2bcd98cc7e00bea920fd6a0f68a4e14
Parents: e28a8c5
Author: Herman van Hovell <hvanhov...@databricks.com>
Authored: Tue Aug 16 21:35:39 2016 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Tue Aug 16 21:35:39 2016 -0700

----------------------------------------------------------------------
 .../apache/spark/sql/catalyst/parser/AstBuilder.scala | 14 +++++++-------
 .../spark/sql/catalyst/parser/ParserUtils.scala       |  4 ++--
 .../apache/spark/sql/execution/SparkSqlParser.scala   |  5 ++---
 3 files changed, 11 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/4a2c375b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 25c8445..09b650c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -132,7 +132,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
     // Build the insert clauses.
     val inserts = ctx.multiInsertQueryBody.asScala.map {
       body =>
-        assert(body.querySpecification.fromClause == null,
+        validate(body.querySpecification.fromClause == null,
           "Multi-Insert queries cannot have a FROM clause in their individual SELECT statements",
           body)
 
@@ -596,7 +596,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
       // function takes X PERCENT as the input and the range of X is [0, 100], we need to
       // adjust the fraction.
       val eps = RandomSampler.roundingEpsilon
-      assert(fraction >= 0.0 - eps && fraction <= 1.0 + eps,
+      validate(fraction >= 0.0 - eps && fraction <= 1.0 + eps,
         s"Sampling fraction ($fraction) must be on interval [0, 1]",
         ctx)
       Sample(0.0, fraction, withReplacement = false, (math.random * 1000).toInt, query)(true)
@@ -664,7 +664,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
     // Get the backing expressions.
     val expressions = ctx.expression.asScala.map { eCtx =>
       val e = expression(eCtx)
-      assert(e.foldable, "All expressions in an inline table must be constants.", eCtx)
+      validate(e.foldable, "All expressions in an inline table must be constants.", eCtx)
       e
     }
 
@@ -686,7 +686,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
     val baseAttributes = structType.toAttributes.map(_.withNullability(true))
     val attributes = if (ctx.identifierList != null) {
       val aliases = visitIdentifierList(ctx.identifierList)
-      assert(aliases.size == baseAttributes.size,
+      validate(aliases.size == baseAttributes.size,
         "Number of aliases must match the number of fields in an inline table.", ctx)
       baseAttributes.zip(aliases).map(p => p._1.withName(p._2))
     } else {
@@ -1094,7 +1094,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
     // We currently only allow foldable integers.
     def value: Int = {
       val e = expression(ctx.expression)
-      assert(e.resolved && e.foldable && e.dataType == IntegerType,
+      validate(e.resolved && e.foldable && e.dataType == IntegerType,
         "Frame bound value must be a constant integer.",
         ctx)
       e.eval().asInstanceOf[Int]
@@ -1347,7 +1347,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
    */
   override def visitInterval(ctx: IntervalContext): Literal = withOrigin(ctx) {
     val intervals = ctx.intervalField.asScala.map(visitIntervalField)
-    assert(intervals.nonEmpty, "at least one time unit should be given for interval literal", ctx)
+    validate(intervals.nonEmpty, "at least one time unit should be given for interval literal", ctx)
     Literal(intervals.reduce(_.add(_)))
   }
 
@@ -1374,7 +1374,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
         case (from, Some(t)) =>
           throw new ParseException(s"Intervals FROM $from TO $t are not supported.", ctx)
       }
-      assert(interval != null, "No interval can be constructed", ctx)
+      validate(interval != null, "No interval can be constructed", ctx)
       interval
     } catch {
       // Handle Exceptions thrown by CalendarInterval

http://git-wip-us.apache.org/repos/asf/spark/blob/4a2c375b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala
index b04ce58..bc35ae2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala
@@ -77,8 +77,8 @@ object ParserUtils {
     Origin(Option(token.getLine), Option(token.getCharPositionInLine))
   }
 
-  /** Assert if a condition holds. If it doesn't throw a parse exception. */
-  def assert(f: => Boolean, message: String, ctx: ParserRuleContext): Unit = {
+  /** Validate the condition. If it doesn't hold, throw a parse exception. */
+  def validate(f: => Boolean, message: String, ctx: ParserRuleContext): Unit = {
     if (!f) {
       throw new ParseException(message, ctx)
     }
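
A brief usage sketch against the signature above (the `checkBucketCount` helper is hypothetical). Note that unlike `scala.Predef.assert`, which is `@elidable` and can be compiled away, `validate` is an ordinary method, so the requirement is always enforced:

```scala
import org.antlr.v4.runtime.ParserRuleContext
import org.apache.spark.sql.catalyst.parser.ParserUtils.validate

// Hypothetical caller: reject an invalid bucket count while parsing,
// surfacing the failure as a ParseException tied to the parse context.
def checkBucketCount(numBuckets: Int, ctx: ParserRuleContext): Unit = {
  validate(numBuckets > 0, s"Number of buckets ($numBuckets) must be positive", ctx)
}
```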

http://git-wip-us.apache.org/repos/asf/spark/blob/4a2c375b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 9da2b5a..71c3bd3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.execution
 
 import scala.collection.JavaConverters._
-import scala.util.Try
 
 import org.antlr.v4.runtime.{ParserRuleContext, Token}
 import org.antlr.v4.runtime.tree.TerminalNode
@@ -799,7 +798,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
   }
 
   /**
-   * Create an [[AlterTableDiscoverPartitionsCommand]] command
+   * Create an [[AlterTableRecoverPartitionsCommand]] command
    *
    * For example:
    * {{{
@@ -1182,7 +1181,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
         entry("mapkey.delim", ctx.keysTerminatedBy) ++
         Option(ctx.linesSeparatedBy).toSeq.map { token =>
           val value = string(token)
-          assert(
+          validate(
             value == "\n",
             s"LINES TERMINATED BY only supports newline '\\n' right now: $value",
             ctx)

