GitHub user rxin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/11557#discussion_r55417347
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ng/AstBuilder.scala ---
    @@ -0,0 +1,1128 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.spark.sql.catalyst.parser.ng
    +
    +import java.sql.{Date, Timestamp}
    +
    +import scala.collection.JavaConverters._
    +
    +import org.antlr.v4.runtime.{ParserRuleContext, Token}
    +import org.antlr.v4.runtime.tree.{ParseTree, TerminalNode}
    +
    +import org.apache.spark.Logging
    +import org.apache.spark.sql.AnalysisException
    +import org.apache.spark.sql.catalyst.{InternalRow, TableIdentifier}
    +import org.apache.spark.sql.catalyst.analysis._
    +import org.apache.spark.sql.catalyst.expressions._
    +import org.apache.spark.sql.catalyst.parser.ParseUtils
    +import org.apache.spark.sql.catalyst.parser.ng.SqlBaseParser._
    +import org.apache.spark.sql.catalyst.plans._
    +import org.apache.spark.sql.catalyst.plans.logical._
    +import org.apache.spark.sql.catalyst.trees.CurrentOrigin
    +import org.apache.spark.sql.types._
    +import org.apache.spark.unsafe.types.CalendarInterval
    +import org.apache.spark.util.random.RandomSampler
    +
    +/**
    + * The AstBuilder converts an ANTLR4 ParseTree into a catalyst Expression, LogicalPlan or
    + * TableIdentifier.
    + */
    +class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
    +  import AstBuilder._
    +
    +  protected def typedVisit[T](ctx: ParseTree): T = {
    +    ctx.accept(this).asInstanceOf[T]
    +  }
    +
    +  override def visitSingleStatement(ctx: SingleStatementContext): LogicalPlan = withOrigin(ctx) {
    +    visit(ctx.statement).asInstanceOf[LogicalPlan]
    +  }
    +
    +  override def visitSingleExpression(ctx: SingleExpressionContext): Expression = withOrigin(ctx) {
    +    visitNamedExpression(ctx.namedExpression)
    +  }
    +
    +  override def visitSingleTableIdentifier(
    +      ctx: SingleTableIdentifierContext): TableIdentifier = withOrigin(ctx) {
    +    visitTableIdentifier(ctx.tableIdentifier)
    +  }
    +
    +  /* ********************************************************************************************
    +   * Plan parsing
    +   * ******************************************************************************************** */
    +  protected def plan(tree: ParserRuleContext): LogicalPlan = typedVisit(tree)
    +
    +  /**
    +   * Create a plan for a SHOW FUNCTIONS command.
    +   */
    +  override def visitShowFunctions(ctx: ShowFunctionsContext): LogicalPlan = withOrigin(ctx) {
    +    import ctx._
    +    if (qualifiedName != null) {
    +      val names = qualifiedName().identifier().asScala.map(_.getText).toList
    +      names match {
    +        case db :: name :: Nil =>
    +          ShowFunctions(Some(db), Some(name))
    +        case name :: Nil =>
    +          ShowFunctions(None, Some(name))
    +        case _ =>
    +          notSupported("SHOW FUNCTIONS unsupported name", ctx)
    +      }
    +    } else if (pattern != null) {
    +      ShowFunctions(None, Some(unquote(pattern.getText)))
    +    } else {
    +      ShowFunctions(None, None)
    +    }
    +  }
    +
    +  /**
    +   * Create a plan for a DESCRIBE FUNCTION command.
    +   */
    +  override def visitDescribeFunction(ctx: DescribeFunctionContext): LogicalPlan = withOrigin(ctx) {
    +    val functionName = ctx.qualifiedName().identifier().asScala.map(_.getText).mkString(".")
    +    DescribeFunction(functionName, ctx.EXTENDED != null)
    +  }
    +
    +  /**
    +   * Create a top-level plan with Common Table Expressions.
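    +   *
    +   * For example:
    +   * {{{
    +   *   WITH cte AS (SELECT * FROM src)
    +   *   SELECT * FROM cte
    +   * }}}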
    +   */
    +  override def visitQuery(ctx: QueryContext): LogicalPlan = withOrigin(ctx) {
    +    val query = plan(ctx.queryNoWith)
    +
    +    // Apply CTEs
    +    query.optional(ctx.ctes) {
    +      val ctes = ctx.ctes.namedQuery.asScala.map {
    +        case nCtx =>
    +          val namedQuery = visitNamedQuery(nCtx)
    +          (namedQuery.alias, namedQuery)
    +      }.toMap
    +      With(query, ctes)
    +    }
    +  }
    +
    +  /**
    +   * Create a named logical plan.
    +   *
    +   * This is only used for Common Table Expressions.
    +   */
    +  override def visitNamedQuery(ctx: NamedQueryContext): SubqueryAlias = withOrigin(ctx) {
    +    SubqueryAlias(ctx.name.getText, plan(ctx.queryNoWith))
    +  }
    +
    +  /**
    +   * Create a logical plan which allows for multiple inserts using one 'from' statement. These
    +   * queries have the following SQL form:
    +   * {{{
    +   *   [WITH cte...]?
    +   *   FROM src
    +   *   [INSERT INTO tbl1 SELECT *]+
    +   * }}}
    +   * For example:
    +   * {{{
    +   *   FROM db.tbl1 A
    +   *   INSERT INTO dbo.tbl1 SELECT * WHERE A.value = 10 LIMIT 5
    +   *   INSERT INTO dbo.tbl2 SELECT * WHERE A.value = 12
    +   * }}}
    +   * This (Hive) feature cannot be combined with set-operators.
    +   */
    +  override def visitMultiInsertQuery(ctx: MultiInsertQueryContext): LogicalPlan = withOrigin(ctx) {
    +    val from = visitFromClause(ctx.fromClause)
    +
    +    // Build the insert clauses.
    +    val inserts = ctx.multiInsertQueryBody.asScala.map {
    +      body =>
    +        assert(body.querySpecification.fromClause == null,
    +          "Multi-Insert queries cannot have a FROM clause in their 
individual SELECT statements",
    +          body)
    +
    +        withQuerySpecification(body.querySpecification, from).
    +          // Add organization statements.
    +          optionalMap(body.queryOrganization)(withQueryOrganization).
    +          // Add insert.
    +          optionalMap(body.insertInto())(withInsertInto)
    +    }
    +
    +    // If there are multiple INSERTs, just UNION them together into one query.
    +    inserts match {
    +      case Seq(query) => query
    +      case queries => Union(queries)
    +    }
    +  }
    +
    +  /**
    +   * Create a logical plan for a regular (single-insert) query.
    +   */
    +  override def visitSingleInsertQuery(
    +      ctx: SingleInsertQueryContext): LogicalPlan = withOrigin(ctx) {
    +    plan(ctx.queryTerm).
    +      // Add organization statements.
    +      optionalMap(ctx.queryOrganization)(withQueryOrganization).
    +      // Add insert.
    +      optionalMap(ctx.insertInto())(withInsertInto)
    +  }
    +
    +  /**
    +   * Add an INSERT INTO [TABLE]/INSERT OVERWRITE TABLE operation to the logical plan.
    +   */
    +  private def withInsertInto(
    +      ctx: InsertIntoContext,
    +      query: LogicalPlan): LogicalPlan = withOrigin(ctx) {
    +    val tableIdent = visitTableIdentifier(ctx.tableIdentifier)
    +    val partitionKeys = Option(ctx.partitionSpec).toSeq.flatMap {
    +      _.partitionVal.asScala.map {
    +        pVal => (pVal.identifier.getText, Option(pVal.constant).map(c => unquote(c.getText)))
    +      }
    +    }.toMap
    +
    +    InsertIntoTable(
    +      UnresolvedRelation(tableIdent, None),
    +      partitionKeys,
    +      query,
    +      ctx.OVERWRITE != null,
    +      ctx.EXISTS != null)
    +  }
    +
    +  /**
    +   * Add ORDER BY/SORT BY/CLUSTER BY/DISTRIBUTE BY/LIMIT/WINDOWS clauses to the logical plan.
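    +   *
    +   * For example, CLUSTER BY is planned as DISTRIBUTE BY plus an ascending SORT BY on the
    +   * same expressions:
    +   * {{{
    +   *   SELECT * FROM tbl CLUSTER BY key
    +   *   -- is equivalent to:
    +   *   SELECT * FROM tbl DISTRIBUTE BY key SORT BY key
    +   * }}}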
    +   */
    +  private def withQueryOrganization(
    +      ctx: QueryOrganizationContext,
    +      query: LogicalPlan): LogicalPlan = withOrigin(ctx) {
    +    import ctx._
    +
    +    // Handle ORDER BY, SORT BY, DISTRIBUTE BY, and CLUSTER BY clause.
    +    val withOrder = if (
    +      !order.isEmpty && sort.isEmpty && distributeBy.isEmpty && clusterBy.isEmpty) {
    +      // ORDER BY ...
    +      Sort(order.asScala.map(visitSortItem), global = true, query)
    +    } else if (order.isEmpty && !sort.isEmpty && distributeBy.isEmpty && clusterBy.isEmpty) {
    +      // SORT BY ...
    +      Sort(sort.asScala.map(visitSortItem), global = false, query)
    +    } else if (order.isEmpty && sort.isEmpty && !distributeBy.isEmpty && clusterBy.isEmpty) {
    +      // DISTRIBUTE BY ...
    +      RepartitionByExpression(expressionList(distributeBy), query)
    +    } else if (order.isEmpty && !sort.isEmpty && !distributeBy.isEmpty && clusterBy.isEmpty) {
    +      // SORT BY ... DISTRIBUTE BY ...
    +      Sort(
    +        sort.asScala.map(visitSortItem),
    +        global = false,
    +        RepartitionByExpression(expressionList(distributeBy), query))
    +    } else if (order.isEmpty && sort.isEmpty && distributeBy.isEmpty && !clusterBy.isEmpty) {
    +      // CLUSTER BY ...
    +      val expressions = expressionList(clusterBy)
    +      Sort(
    +        expressions.map(SortOrder(_, Ascending)),
    +        global = false,
    +        RepartitionByExpression(expressions, query))
    +    } else if (order.isEmpty && sort.isEmpty && distributeBy.isEmpty && clusterBy.isEmpty) {
    +      // [EMPTY]
    +      query
    +    } else {
    +      notSupported("Combination of ORDER BY/SORT BY/DISTRIBUTE BY/CLUSTER BY is not supported", ctx)
    +    }
    +
    +    // LIMIT
    +    val withLimit = withOrder.optional(limit) {
    +      Limit(typedVisit(limit), withOrder)
    +    }
    +
    +    // WINDOWS
    +    withLimit.optionalMap(windows)(withWindows)
    +  }
    +
    +  /**
    +   * Create a logical plan using a query specification.
    +   */
    +  override def visitQuerySpecification(
    +      ctx: QuerySpecificationContext): LogicalPlan = withOrigin(ctx) {
    +    val from = OneRowRelation.optional(ctx.fromClause) {
    +      visitFromClause(ctx.fromClause)
    +    }
    +    withQuerySpecification(ctx, from)
    +  }
    +
    +  /**
    +   * Add a query specification to a logical plan. The query specification is the core of the
    +   * logical plan; this is where sourcing (FROM clause), transforming (SELECT TRANSFORM/MAP/REDUCE),
    +   * projection (SELECT), aggregation (GROUP BY ... HAVING ...) and filtering (WHERE) take place.
    +   *
    +   * Note that query hints are ignored (both by the parser and the builder).
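    +   *
    +   * For example:
    +   * {{{
    +   *   SELECT a, sum(b) FROM tbl WHERE c > 0 GROUP BY a HAVING sum(b) > 10
    +   * }}}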
    +   */
    +  private def withQuerySpecification(
    +      ctx: QuerySpecificationContext,
    +      relation: LogicalPlan): LogicalPlan = withOrigin(ctx) {
    +    import ctx._
    +
    +    // WHERE
    +    val withFilter = relation.optional(where) {
    +      Filter(expression(where), relation)
    +    }
    +
    +    // Expressions.
    +    val expressions = namedExpression.asScala.map(visit).map {
    +      case e: Expression => UnresolvedAlias(e)
    +    }
    +
    +    // Create either a transform or a regular query.
    +    kind.getType match {
    +      case SqlBaseParser.MAP | SqlBaseParser.REDUCE | SqlBaseParser.TRANSFORM =>
    +        // Transform
    +
    +        // Create the attributes.
    +        val attributes = if (colTypeList != null) {
    +          // Typed return columns.
    +          visitColTypeList(colTypeList).toAttributes
    +        } else if (columnAliasList != null) {
    +          // Untyped return columns.
    +          visitColumnAliasList(columnAliasList).map { name =>
    +            AttributeReference(name, StringType, nullable = true)()
    +          }
    +        } else {
    +          Seq.empty
    +        }
    +
    +        // Create the transform.
    +        ScriptTransformation(
    +          expressions,
    +          unquote(script.getText),
    +          attributes,
    +          withFilter,
    +          withScriptIOSchema(inRowFormat, outRowFormat, outRecordReader))
    +
    +      case SqlBaseParser.SELECT =>
    +        // Regular select
    +
    +        // Add lateral views.
    +        val withLateralView = ctx.lateralView.asScala.foldLeft(withFilter)(withGenerate)
    +
    +        // Add aggregation with having or a project.
    +        val withProject = if (aggregation != null) {
    +          withAggregation(aggregation, expressions, withLateralView).optionalMap(having) {
    +            case (h, p) => Filter(expression(h), p)
    +          }
    +        } else {
    +          Project(expressions, withLateralView)
    +        }
    +
    +        // Distinct
    +        val withDistinct = if (setQuantifier() != null && setQuantifier().DISTINCT() != null) {
    +          Distinct(withProject)
    +        } else {
    +          withProject
    +        }
    +
    +        // Window
    +        withDistinct.optionalMap(windows)(withWindows)
    +    }
    +  }
    +
    +  /**
    +   * Create a (Hive based) [[ScriptInputOutputSchema]].
    +   */
    +  protected def withScriptIOSchema(
    +      inRowFormat: RowFormatContext,
    +      outRowFormat: RowFormatContext,
    +      outRecordReader: Token): ScriptInputOutputSchema = null
    +
    +  /**
    +   * Create a logical plan for a given 'FROM' clause. Note that we support multiple (comma
    +   * separated) relations here; these get converted into a single plan by a condition-less
    +   * inner join.
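    +   *
    +   * For example:
    +   * {{{
    +   *   SELECT * FROM a, b WHERE a.id = b.id
    +   * }}}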
    +   */
    +  override def visitFromClause(ctx: FromClauseContext): LogicalPlan = withOrigin(ctx) {
    +    ctx.relation.asScala.map(plan).reduceLeft(Join(_, _, Inner, None))
    +  }
    +
    +  /**
    +   * Connect two queries by a Set operator.
    +   *
    +   * Supported Set operators are:
    +   * - UNION [DISTINCT]
    +   * - UNION ALL
    +   * - EXCEPT [DISTINCT]
    +   * - INTERSECT [DISTINCT]
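    +   *
    +   * For example:
    +   * {{{
    +   *   SELECT id FROM a UNION ALL SELECT id FROM b
    +   * }}}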
    +   */
    +  override def visitSetOperation(ctx: SetOperationContext): LogicalPlan = withOrigin(ctx) {
    +    val left = plan(ctx.left)
    +    val right = plan(ctx.right)
    +    val all = Option(ctx.setQuantifier()).exists(_.ALL != null)
    +    ctx.operator.getType match {
    +      case SqlBaseParser.UNION if all =>
    +        Union(left, right)
    +      case SqlBaseParser.UNION =>
    +        Distinct(Union(left, right))
    +      case SqlBaseParser.INTERSECT if all =>
    +        notSupported("INTERSECT ALL is not supported.", ctx)
    +      case SqlBaseParser.INTERSECT =>
    +        Intersect(left, right)
    +      case SqlBaseParser.EXCEPT if all =>
    +        notSupported("EXCEPT ALL is not supported.", ctx)
    +      case SqlBaseParser.EXCEPT =>
    +        Except(left, right)
    +    }
    +  }
    +
    +  /**
    +   * Add a [[WithWindowDefinition]] operator to a logical plan.
    +   */
    +  private def withWindows(
    +      ctx: WindowsContext,
    +      query: LogicalPlan): LogicalPlan = withOrigin(ctx) {
    +    // Collect all window specifications defined in the WINDOW clause.
    +    val baseWindowMap = ctx.namedWindow.asScala.map {
    +      wCtx =>
    +        (wCtx.identifier.getText, typedVisit[WindowSpec](wCtx.windowSpec))
    +    }.toMap
    +
    +    // Handle cases like
    +    // window w1 as (partition by p_mfgr order by p_name
    +    //               range between 2 preceding and 2 following),
    +    //        w2 as w1
    +    val windowMap = baseWindowMap.mapValues {
    +      case WindowSpecReference(name) => baseWindowMap(name).asInstanceOf[WindowSpecDefinition]
    +      case spec: WindowSpecDefinition => spec
    +    }
    +    WithWindowDefinition(windowMap, query)
    +  }
    +
    +  /**
    +   * Add an [[Aggregate]] to a logical plan.
    +   */
    +  private def withAggregation(
    +      ctx: AggregationContext,
    +      selectExpressions: Seq[NamedExpression],
    +      query: LogicalPlan): LogicalPlan = withOrigin(ctx) {
    +    import ctx._
    +    val groupByExpressions = expressionList(groupingExpressions)
    +
    +    if (GROUPING != null) {
    +      // GROUP BY .... GROUPING SETS (...)
    +      // TODO use new expression set here?
    +      val expressionMap = groupByExpressions.zipWithIndex.toMap
    +      val numExpressions = expressionMap.size
    +      val mask = (1 << numExpressions) - 1
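    +      // For example, with GROUP BY a, b, c the full mask is 111 (7). The grouping set (a, c)
    +      // unsets the bits for a (index 0) and c (index 2), yielding 010 (2): b is the only
    +      // column that is NOT a grouping column in this set.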
    +      val masks = ctx.groupingSet.asScala.map {
    +        _.expression.asScala.foldLeft(mask) {
    +          case (bitmap, eCtx) =>
    +            // Find the index of the expression.
    +            val e = typedVisit[Expression](eCtx)
    +            val index = expressionMap.find(_._1.semanticEquals(e)).map(_._2).getOrElse(
    +              throw new AnalysisException(
    +                s"${e.treeString} doesn't show up in the GROUP BY list"))
    +            // 0 means that the column at the given index is a grouping column, 1 means it is not,
    +            // so we unset the bit in bitmap.
    +            bitmap & ~(1 << (numExpressions - 1 - index))
    +        }
    +      }
    +      GroupingSets(masks, groupByExpressions, query, selectExpressions)
    +    } else {
    +      // GROUP BY .... (WITH CUBE | WITH ROLLUP)?
    +      val mappedGroupByExpressions = if (CUBE != null) {
    +        Seq(Cube(groupByExpressions))
    +      } else if (ROLLUP != null) {
    +        Seq(Rollup(groupByExpressions))
    +      } else {
    +        groupByExpressions
    +      }
    +      Aggregate(mappedGroupByExpressions, selectExpressions, query)
    +    }
    +  }
    +
    +  /**
    +   * Add a [[Generate]] (Lateral View) to a logical plan.
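    +   *
    +   * For example:
    +   * {{{
    +   *   SELECT * FROM src LATERAL VIEW explode(arr) tbl AS a
    +   * }}}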
    +   */
    +  private def withGenerate(
    +      query: LogicalPlan,
    +      ctx: LateralViewContext): LogicalPlan = withOrigin(ctx) {
    +    val expressions = expressionList(ctx.expression)
    +
    +    // Create the generator.
    +    // TODO Add support for other generators.
    +    val generator = ctx.qualifiedName.getText.toLowerCase match {
    +      case "explode" if expressions.size == 1 =>
    +        Explode(expressions.head)
    +      case "json_tuple" =>
    +        JsonTuple(expressions)
    +      case other =>
    +        notSupported(s"Generator function '$other' is not supported", ctx)
    +    }
    +
    +    Generate(
    +      generator,
    +      join = true,
    +      outer = ctx.OUTER != null,
    +      Some(ctx.tblName.getText.toLowerCase),
    +      ctx.colName.asScala.map(_.getText).map(UnresolvedAttribute.apply),
    +      query)
    +  }
    +
    +  /**
    +   * Create a join between two logical plans.
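    +   *
    +   * For example:
    +   * {{{
    +   *   SELECT * FROM a LEFT OUTER JOIN b ON a.id = b.id
    +   * }}}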
    +   */
    +  override def visitJoinRelation(ctx: JoinRelationContext): LogicalPlan = withOrigin(ctx) {
    +    val baseJoinType = ctx.joinType match {
    +      case jt if jt.FULL != null => FullOuter
    +      case jt if jt.SEMI != null => LeftSemi
    +      case jt if jt.LEFT != null => LeftOuter
    +      case jt if jt.RIGHT != null => RightOuter
    +      case _ => Inner
    +    }
    +    val joinType = if (ctx.NATURAL != null) {
    +      NaturalJoin(baseJoinType)
    +    } else {
    +      baseJoinType
    +    }
    +
    +    val left = plan(ctx.left)
    +    val right = if (ctx.right != null) {
    +      plan(ctx.right)
    +    } else {
    +      plan(ctx.rightRelation)
    +    }
    +    assert(left != null, "Left side should not be null", ctx)
    +    assert(right != null, "Right side should not be null", ctx)
    +    Join(left, right, joinType, Option(ctx.booleanExpression).map(expression))
    +  }
    +
    +  /**
    +   * Create a sampled relation. This returns a [[Sample]] operator when sampling is requested.
    +   *
    +   * This currently supports the following sampling methods:
    +   * - TABLESAMPLE(x ROWS): Sample the table down to the given number of rows.
    +   * - TABLESAMPLE(x PERCENT): Sample the table down to the given percentage. Note that
    +   *   percentages are defined as a number between 0 and 100.
    +   * - TABLESAMPLE(BUCKET x OUT OF y): Sample the table down to an 'x' divided by 'y' fraction.
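    +   *
    +   * For example:
    +   * {{{
    +   *   SELECT * FROM tbl TABLESAMPLE(10 PERCENT)
    +   * }}}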
    +   */
    +  override def visitSampledRelation(ctx: SampledRelationContext): LogicalPlan = withOrigin(ctx) {
    +    val relation = plan(ctx.relationPrimary)
    +
    +    // Create a sampled plan if we need one.
    +    def sample(fraction: Double): Sample = {
    +      Sample(0.0, fraction, withReplacement = false, (math.random * 1000).toInt, relation)(true)
    +    }
    +
    +    // Sample the relation if we have to.
    +    relation.optional(ctx.sampleType) {
    +      ctx.sampleType.getType match {
    +        case SqlBaseParser.ROWS =>
    +          Limit(expression(ctx.expression), relation)
    +
    +        case SqlBaseParser.PERCENTLIT =>
    +          val fraction = ctx.percentage.getText.toDouble
    +          // The range of fraction accepted by Sample is [0, 1]. Because Hive's block sampling
    +          // function takes X PERCENT as the input and the range of X is [0, 100], we need to
    +          // adjust the fraction.
    +          val eps = RandomSampler.roundingEpsilon
    +          require(fraction >= 0.0 - eps && fraction <= 100.0 + eps,
    +            s"Sampling fraction ($fraction) must be on interval [0, 100]")
    +          sample(fraction / 100.0d)
    +
    +        case SqlBaseParser.BUCKET if ctx.ON != null =>
    +          notSupported("TABLESAMPLE(BUCKET x OUT OF y ON id) is not 
supported", ctx)
    +
    +        case SqlBaseParser.BUCKET =>
    +          sample(ctx.numerator.getText.toDouble / ctx.denominator.getText.toDouble)
    +      }
    +    }
    +  }
    +
    +  /**
    +   * Create a logical plan for a sub-query.
    +   */
    +  override def visitSubquery(ctx: SubqueryContext): LogicalPlan = withOrigin(ctx) {
    +    plan(ctx.queryNoWith)
    +  }
    +
    +  /**
    +   * Create an un-aliased table reference. This is typically used for top-level table references,
    +   * for example:
    +   * {{{
    +   *   INSERT INTO db.tbl2
    +   *   TABLE db.tbl1
    +   * }}}
    +   */
    +  override def visitTable(ctx: TableContext): LogicalPlan = withOrigin(ctx) {
    +    UnresolvedRelation(visitTableIdentifier(ctx.tableIdentifier), None)
    +  }
    +
    +  /**
    +   * Create an aliased table reference. This is typically used in FROM clauses.
    +   */
    +  override def visitTableName(ctx: TableNameContext): LogicalPlan = withOrigin(ctx) {
    +    UnresolvedRelation(
    +      visitTableIdentifier(ctx.tableIdentifier),
    +      Option(ctx.identifier).map(_.getText))
    +  }
    +
    +  /**
    +   * Create an inline table (a virtual table in Hive parlance).
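    +   *
    +   * For example, a query of roughly the following shape:
    +   * {{{
    +   *   SELECT * FROM VALUES (1, 'a'), (2, 'b') AS tbl(x, y)
    +   * }}}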
    +   */
    +  override def visitInlineTable(ctx: InlineTableContext): LogicalPlan = withOrigin(ctx) {
    +    // Get the backing expressions.
    +    val expressions = ctx.expression.asScala.map(expression)
    +
    +    // Validate and evaluate the rows.
    +    val (structType, structConstructor) = expressions.head.dataType match {
    +      case st: StructType =>
    +        (st, (e: Expression) => e)
    +      case dt =>
    +        val st = CreateStruct(Seq(expressions.head)).dataType
    +        (st, (e: Expression) => CreateStruct(Seq(e)))
    +    }
    +    val rows = expressions.map {
    +      case expression =>
    +        assert(expression.foldable, "All expressions in an inline table must be constants.", ctx)
    +        val safe = Cast(structConstructor(expression), structType)
    +        safe.eval().asInstanceOf[InternalRow]
    +    }
    +
    +    // Construct attributes.
    +    val baseAttributes = structType.toAttributes
    +    val attributes = if (ctx.columnAliases != null) {
    +      val aliases = visitColumnAliases(ctx.columnAliases)
    +      assert(aliases.size == baseAttributes.size,
    +        "Number of aliases must match the number of fields in an inline 
table.", ctx)
    +      baseAttributes.zip(aliases).map(p => p._1.withName(p._2))
    +    } else {
    +      baseAttributes
    +    }
    +
    +    LocalRelation(attributes, rows)
    +  }
    +
    +  /**
    +   * Create an alias (SubqueryAlias) for a join relation. This is practically the same as
    +   * visitAliasedQuery and visitNamedExpression; ANTLR4, however, requires us to use 3 different
    +   * hooks.
    +   */
    +  override def visitAliasedRelation(ctx: AliasedRelationContext): LogicalPlan = withOrigin(ctx) {
    +    aliasPlan(ctx.identifier, plan(ctx.relation()))
    +  }
    +
    +  /**
    +   * Create an alias (SubqueryAlias) for a sub-query. This is practically the same as
    +   * visitAliasedRelation and visitNamedExpression; ANTLR4, however, requires us to use 3 different
    +   * hooks.
    +   */
    +  override def visitAliasedQuery(ctx: AliasedQueryContext): LogicalPlan = withOrigin(ctx) {
    +    aliasPlan(ctx.identifier, plan(ctx.queryNoWith))
    +  }
    +
    +  /**
    +   * Create an alias (SubqueryAlias) for a LogicalPlan. The alias is optional.
    +   */
    +  private def aliasPlan(alias: IdentifierContext, plan: LogicalPlan): LogicalPlan = {
    +    plan.optional(alias) {
    +      SubqueryAlias(alias.getText, plan)
    +    }
    +  }
    +
    +  /**
    +   * Create a Sequence of Strings for a parenthesis enclosed alias list.
    +   */
    +  override def visitColumnAliases(ctx: ColumnAliasesContext): Seq[String] = withOrigin(ctx) {
    +    visitColumnAliasList(ctx.columnAliasList)
    +  }
    +
    +  /**
    +   * Create a Sequence of Strings for an alias list.
    +   */
    +  override def visitColumnAliasList(ctx: ColumnAliasListContext): Seq[String] = withOrigin(ctx) {
    +    ctx.identifier.asScala.map(_.getText)
    +  }
    +
    +  /**
    +   * Create a [[TableIdentifier]] from a 'tableName' or 'databaseName'.'tableName' pattern.
    +   */
    +  override def visitTableIdentifier(
    +      ctx: TableIdentifierContext): TableIdentifier = withOrigin(ctx) {
    +    TableIdentifier(ctx.table.getText, Option(ctx.db).map(_.getText))
    +  }
    +
    +  /* ********************************************************************************************
    +   * Expression parsing
    +   * ******************************************************************************************** */
    +  private def expression(tree: ParserRuleContext): Expression = typedVisit(tree)
    +
    +  private def expressionList(trees: java.util.List[ExpressionContext]): Seq[Expression] = {
    +    trees.asScala.map(expression)
    +  }
    +
    +  private def invertIfNotDefined(expression: Expression, not: TerminalNode): Expression = {
    +    if (not != null) {
    +      Not(expression)
    +    } else {
    +      expression
    +    }
    +  }
    +
    +  override def visitStar(ctx: StarContext): Expression = withOrigin(ctx) {
    +    UnresolvedStar(Option(ctx.qualifiedName()).map(_.identifier.asScala.map(_.getText)))
    +  }
    +
    +  override def visitNamedExpression(ctx: NamedExpressionContext): Expression = withOrigin(ctx) {
    +    val e = expression(ctx.expression)
    +    if (ctx.identifier != null) {
    +      Alias(e, ctx.identifier.getText)()
    +    } else if (ctx.columnAliases != null) {
    +      MultiAlias(e, visitColumnAliases(ctx.columnAliases))
    +    } else {
    +      e
    +    }
    +  }
    +
    +  override def visitLogicalBinary(ctx: LogicalBinaryContext): Expression = withOrigin(ctx) {
    +    val left = expression(ctx.left)
    +    val right = expression(ctx.right)
    +    ctx.operator.getType match {
    +      case SqlBaseParser.AND =>
    +        And(left, right)
    +      case SqlBaseParser.OR =>
    +        Or(left, right)
    +    }
    --- End diff --
    
    That one's now merged

