Github user dilipbiswal commented on a diff in the pull request:

    https://github.com/apache/spark/pull/12222#discussion_r59687345
  
    --- Diff: 
sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala 
---
    @@ -423,6 +424,100 @@ case class ShowTablePropertiesCommand(
     }
     
     /**
    + * A command for users to list the column names for a table. This function 
creates a
    + * [[ShowColumnsCommand]] logical plan.
    + *
    + * The syntax of using this command in SQL is:
    + * {{{
    + *   SHOW COLUMNS (FROM | IN) table_identifier [(FROM | IN) database];
    + * }}}
    + */
    +case class ShowColumnsCommand(table: TableIdentifier) extends 
RunnableCommand {
    +  // The result of SHOW COLUMNS has one column called 'result'
    +  override val output: Seq[Attribute] = {
    +    AttributeReference("result", StringType, nullable = false)() :: Nil
    +  }
    +
    +  override def run(sqlContext: SQLContext): Seq[Row] = {
    +    val relation = sqlContext.sessionState.catalog.lookupRelation(table, 
None)
    +    relation.schema.fields.map { field =>
    +      Row(field.name)
    +    }
    +  }
    +}
    +
    +/**
    + * A command for users to list the partition names of a table. If the 
partition spec is specified,
    + * partitions that match the spec are returned. [[AnalysisException]] 
exception is thrown under
    + * the following conditions:
    + *
    + * 1. If the command is called for a non partitioned table.
    + * 2. If the partition spec refers to the columns that are not defined as 
partitioning columns.
    + *
    + * This function creates a [[ShowPartitionsCommand]] logical plan
    + *
    + * The syntax of using this command in SQL is:
    + * {{{
    + *   SHOW PARTITIONS [db_name.]table_name [PARTITION(partition_spec)]
    + * }}}
    + */
    +case class ShowPartitionsCommand(
    +    table: TableIdentifier,
    +    partitionSpec: Option[Map[String, String]]) extends RunnableCommand {
    +  // The result of SHOW PARTITIONS has one column called 'result'
    +  override val output: Seq[Attribute] = {
    +    AttributeReference("result", StringType, nullable = false)() :: Nil
    +  }
    +
    +  /**
    +   * This function validates the partitioning spec by making sure all the 
referenced columns are
    +   * defined as partitioning columns in table definition. An 
AnalysisException exception is
    +   * thrown if the partitioning spec is invalid.
    +   */
    +  private def validatePartitionSpec(table: CatalogTable, spec: Map[String, 
String]): Unit = {
    +    if (!spec.keySet.forall(table.partitionColumns.map(_.name).contains)) {
    +      throw new AnalysisException(s"Partition spec ${spec.mkString("(", ", 
", ")")} contains " +
    +        s"non-partition columns")
    +    }
    +  }
    +
    +  /**
    +   * Validates and throws an [[AnalysisException]] exception under the 
following conditions:
    +   * 1. If the table is not partitioned.
    +   * 2. If it is a datasource table.
    +   * 3. If it is a view or index table.
    +   */
    +  private def checkRequirements(table: CatalogTable): Unit = {
    +    if (table.tableType == CatalogTableType.VIRTUAL_VIEW ||
    +      table.tableType == CatalogTableType.INDEX_TABLE) {
    +      throw new AnalysisException("Operation not allowed: view or index 
table")
    +    } else if (!DDLUtils.isTablePartitioned(table)) {
    +      throw new AnalysisException(s"Table ${table.qualifiedName} is not a 
partitioned table")
    +    } else if (DDLUtils.isDatasourceTable(table)) {
    +      throw new AnalysisException("Operation not allowed: datasource 
table")
    --- End diff --
    
    Will make the change


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to