This is an automated email from the ASF dual-hosted git repository.

indhumuthumurugesh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new f67c8fa  [CARBONDATA-4161] Describe complex columns
f67c8fa is described below

commit f67c8fae462409c68486fb1de4b251f5576244aa
Author: ShreelekhyaG <[email protected]>
AuthorDate: Tue Mar 23 15:10:23 2021 +0530

    [CARBONDATA-4161] Describe complex columns
    Why is this PR needed?
    Currently, describe formatted displays the column information
    of a table along with some additional information. When complex
    types such as ARRAY, STRUCT, and MAP are present in the table,
    the column definitions can be long and difficult to read in
    nested format.
    
    What changes were proposed in this PR?
    The DESCRIBE output can be formatted to avoid long lines
    for multiple fields. We can pass the column name to the
    command and visualize its structure with child fields.
    
    Does this PR introduce any user interface change?
    Yes,
    DDL Commands:
    DESCRIBE COLUMN fieldname ON [db_name.]table_name;
    DESCRIBE SHORT [db_name.]table_name;
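    For example (illustrative, using the complexcarbontable
    schema from this PR's docs and tests):
    DESCRIBE COLUMN locationinfo.item ON complexcarbontable;
    DESCRIBE SHORT complexcarbontable;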
    
    Is any new testcase added?
    Yes
    
    This closes #4113
---
 docs/ddl-of-carbondata.md                          |  76 +++++++++++
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala    |   1 +
 .../command/management/CarbonAddLoadCommand.scala  |  10 +-
 .../spark/sql/execution/command/package.scala      |  14 ++
 .../table/CarbonDescribeFormattedCommand.scala     | 152 ++++++++++++++++++++-
 .../spark/sql/parser/CarbonSpark2SqlParser.scala   |  26 +++-
 .../describeTable/TestDescribeTable.scala          | 147 ++++++++++++++++++++
 7 files changed, 415 insertions(+), 11 deletions(-)

diff --git a/docs/ddl-of-carbondata.md b/docs/ddl-of-carbondata.md
index ee208c4..3bb6c69 100644
--- a/docs/ddl-of-carbondata.md
+++ b/docs/ddl-of-carbondata.md
@@ -43,6 +43,10 @@ CarbonData DDL statements are documented here,which includes:
  * [External Table on non-transactional table location](#create-external-table-on-non-transactional-table-data-location)
 * [CREATE DATABASE](#create-database)
 * [TABLE MANAGEMENT](#table-management)
+  * [DESCRIBE COMMAND](#describe-command)
+    * [DESCRIBE TABLE](#describe-table)
+    * [DESCRIBE COLUMN](#describe-column)
+    * [DESCRIBE COLUMN SHORT](#describe-column-short)
   * [SHOW TABLE](#show-table)
   * [ALTER TABLE](#alter-table)
     * [RENAME TABLE](#rename-table)
@@ -646,6 +650,78 @@ CarbonData DDL statements are documented here,which includes:
 
 ## TABLE MANAGEMENT  
 
+### DESCRIBE COMMAND
+
+- #### DESCRIBE TABLE
+  Describe table displays the metadata of a table. Using EXTENDED or FORMATTED in the syntax displays detailed information about the table.
+
+  ```
+  [DESC | DESCRIBE] [TABLE] [EXTENDED | FORMATTED] [db_name.]table_name
+  ```
+
+- #### DESCRIBE COLUMN
+  The column definitions for complex types can be long in the DESCRIBE TABLE output, and they are difficult to read in nested format.
+  The DESCRIBE COLUMN command uses special formatting for complex type columns to make the output readable.
+  
+  ```
+  [DESCRIBE | DESC] COLUMN fieldname[.nestedFieldNames] ON [TABLE] [db_name.]table_name;
+  ```
+  Examples: 
+  ```
+    DESCRIBE COLUMN locationinfo on complexcarbontable;
+    
+    +------------------------------+----------------------------------------------------------------------------------------------+-------------------+
+    |col_name                      |data_type                                                                                     |comment            |
+    +------------------------------+----------------------------------------------------------------------------------------------+-------------------+
+    |locationinfo                  |array                                                                                         |this is an array   |
+    |## Children of locationinfo:  |                                                                                              |                   |
+    |item                          |struct<activeprovince:string,activecity:map<string,string>,activestreet:array<string>>       |null               |
+    +------------------------------+----------------------------------------------------------------------------------------------+-------------------+
+    
+    DESCRIBE COLUMN locationinfo.item on complexcarbontable
+    
+    +-----------------------------------+------------------+-------+
+    |col_name                           |data_type         |comment|
+    +-----------------------------------+------------------+-------+
+    |locationinfo.item                  |struct            |null   |
+    |## Children of locationinfo.item:  |                  |       |
+    |activeprovince                     |string            |null   |
+    |activecity                         |map<string,string>|null   |
+    |activestreet                       |array<string>     |null   |
+    +-----------------------------------+------------------+-------+
+
+    DESCRIBE COLUMN locationinfo.item.activecity on complexcarbontable
+    
+    +----------------------------------------------+---------+-------+
+    |col_name                                      |data_type|comment|
+    +----------------------------------------------+---------+-------+
+    |locationinfo.item.activecity                  |map      |null   |
+    |## Children of locationinfo.item.activecity:  |         |       |
+    |key                                           |string   |null   |
+    |value                                         |string   |null   |
+    +----------------------------------------------+---------+-------+
+  ```
+- #### DESCRIBE COLUMN SHORT
+
+    This command displays a shortened version of the table's columns, abbreviating complex type definitions.
+  ```
+  [DESCRIBE | DESC] SHORT [db_name.]table_name;
+  ```
+  Example: 
+  ```
+    DESCRIBE SHORT complexcarbontable;
+    
+    +-------------------+----------+----------------+
+    |           col_name| data_type|         comment|
+    +-------------------+----------+----------------+
+    |deviceinformationid|   integer|            null|
+    |         channelsid|   map<..>|            null|
+    |             mobile|struct<..>|            null|
+    |       locationinfo| array<..>|            null|
+    |        gamepointid|    double|            null|
+    +-------------------+----------+----------------+
+  ```
+
 ### SHOW TABLE
 
  This command can be used to list all the tables in current database or all the tables of a specific database.
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
index 155e63d..29ce9fb 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
@@ -43,6 +43,7 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
   protected val CLASS = carbonKeyWord("CLASS")
   protected val CLEAN = carbonKeyWord("CLEAN")
   protected val COLS = carbonKeyWord("COLS")
+  protected val COLUMN = carbonKeyWord("COLUMN")
   protected val COLUMNS = carbonKeyWord("COLUMNS")
   protected val COMPACT = carbonKeyWord("COMPACT")
   protected val FINISH = carbonKeyWord("FINISH")
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAddLoadCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAddLoadCommand.scala
index 44fe54c..b01c125 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAddLoadCommand.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAddLoadCommand.scala
@@ -72,14 +72,8 @@ case class CarbonAddLoadCommand(
  private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
 
   override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
-    Checker.validateTableExists(databaseNameOp, tableName, sparkSession)
-    val relation = CarbonEnv
-      .getInstance(sparkSession)
-      .carbonMetaStore
-      .lookupRelation(databaseNameOp, tableName)(sparkSession)
-      .asInstanceOf[CarbonRelation]
-    val tableSchema = StructType.fromAttributes(relation.output)
-    val carbonTable = relation.carbonTable
+    val (tableSchema, carbonTable) = Checker.getSchemaAndTable(sparkSession, databaseNameOp,
+      tableName)
     setAuditTable(carbonTable)
     if (!carbonTable.getTableInfo.isTransactionalTable) {
      throw new MalformedCarbonCommandException("Unsupported operation on non transactional table")
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/package.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/package.scala
index 983e6b4..d721192 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/package.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/package.scala
@@ -23,6 +23,9 @@ import scala.language.implicitConversions
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
+import org.apache.spark.sql.hive.CarbonRelation
+import org.apache.spark.sql.types.StructType
+import org.apache.spark.sql.util.SparkSQLUtil
 
 import org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable
@@ -42,6 +45,17 @@ object Checker {
       throw new NoSuchTableException(database, tableName)
     }
   }
+
+  def getSchemaAndTable(sparkSession: SparkSession, databaseNameOp: Option[String],
+      tableName: String) : (StructType, CarbonTable) = {
+    Checker.validateTableExists(databaseNameOp, tableName, sparkSession)
+    val relation = CarbonEnv
+      .getInstance(sparkSession)
+      .carbonMetaStore
+      .lookupRelation(databaseNameOp, tableName)(sparkSession)
+      .asInstanceOf[CarbonRelation]
+    (StructType.fromAttributes(relation.output), relation.carbonTable)
+  }
 }
 
 /**
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
index 477003a..f85bb1f 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
@@ -20,17 +20,21 @@ package org.apache.spark.sql.execution.command.table
 import java.util.Date
 
 import scala.collection.JavaConverters._
+import scala.util.control.Breaks.{break, breakable}
 
 import org.apache.spark.sql.{CarbonEnv, EnvHelper, Row, SparkSession}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
-import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
 import org.apache.spark.sql.execution.SparkPlan
+import org.apache.spark.sql.execution.command.Checker
 import org.apache.spark.sql.execution.command.MetadataCommand
 import org.apache.spark.sql.hive.CarbonRelation
+import org.apache.spark.sql.types.{ArrayType, MapType, MetadataBuilder, StringType, StructField, StructType}
 
 import org.apache.carbondata.common.Strings
 import org.apache.carbondata.common.exceptions.DeprecatedFeatureException
+import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
 import org.apache.carbondata.core.constants.{CarbonCommonConstants, CarbonLoadOptionConstants}
 import org.apache.carbondata.core.metadata.datatype.DataTypes
 import org.apache.carbondata.core.metadata.schema.PartitionInfo
@@ -370,3 +374,149 @@ private[sql] case class CarbonDescribeFormattedCommand(
 
   override protected def opName: String = "DESC FORMATTED"
 }
+
+case class CarbonDescribeColumnCommand(
+    databaseNameOp: Option[String],
+    tableName: String,
+    inputFieldNames: java.util.List[String])
+  extends MetadataCommand {
+
+  override val output: Seq[Attribute] = Seq(
+    // Column names are based on Hive.
+    AttributeReference("col_name", StringType, nullable = false,
+      new MetadataBuilder().putString("comment", "name of the column").build())(),
+    AttributeReference("data_type", StringType, nullable = false,
+      new MetadataBuilder().putString("comment", "data type of the column").build())(),
+    AttributeReference("comment", StringType, nullable = true,
+      new MetadataBuilder().putString("comment", "comment of the column").build())()
+  )
+
+  override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
+    val (tableSchema, carbonTable) = Checker.getSchemaAndTable(sparkSession, databaseNameOp,
+      tableName)
+    val inputField = tableSchema.find(_.name.equalsIgnoreCase(inputFieldNames.get(0)))
+    if (inputField.isEmpty) {
+      throw new MalformedCarbonCommandException(
+        s"Column ${ inputFieldNames.get(0) } does not exist in the table " +
+        s"${ carbonTable.getDatabaseName }.$tableName")
+    }
+    setAuditTable(carbonTable)
+    var results = Seq[(String, String, String)]()
+    var currField = inputField.get
+    val inputFieldsIterator = inputFieldNames.iterator()
+    var inputColumn = inputFieldsIterator.next()
+    while (results.isEmpty) {
+      breakable {
+        if (currField.dataType.typeName.equalsIgnoreCase(CarbonCommonConstants.ARRAY)) {
+          if (inputFieldsIterator.hasNext) {
+            val nextField = inputFieldsIterator.next()
+            // child of an array can be only 'item'
+            if (!nextField.equalsIgnoreCase("item")) {
+              throw handleException(nextField, currField.name, carbonTable.getTableName)
+            }
+            // make the child type the current field to describe further nested types.
+            currField = StructField("item", currField.dataType.asInstanceOf[ArrayType].elementType)
+            inputColumn += "." + currField.name
+            break()
+          }
+          // if no further nested column is given, display the field information and its children.
+          results = Seq((inputColumn,
+            currField.dataType.typeName, currField.getComment().getOrElse("null")),
+            ("## Children of " + inputColumn + ":  ", "", ""))
+          results ++= Seq(("item", currField.dataType.asInstanceOf[ArrayType]
+            .elementType.simpleString, "null"))
+        }
+        else if (currField.dataType.typeName.equalsIgnoreCase(CarbonCommonConstants.STRUCT)) {
+          if (inputFieldsIterator.hasNext) {
+            val nextField = inputFieldsIterator.next()
+            val nextCurrField = currField.dataType.asInstanceOf[StructType].fields
+              .find(_.name.equalsIgnoreCase(nextField))
+            // verify if the input child name exists in the schema
+            if (!nextCurrField.isDefined) {
+              throw handleException(nextField, currField.name, carbonTable.getTableName)
+            }
+            // make the child type the current field to describe further nested types.
+            currField = nextCurrField.get
+            inputColumn += "." + currField.name
+            break()
+          }
+          // if no further nested column is given, display the field information and its children.
+          results = Seq((inputColumn,
+            currField.dataType.typeName, currField.getComment().getOrElse("null")),
+            ("## Children of " + inputColumn + ":  ", "", ""))
+          results ++= currField.dataType.asInstanceOf[StructType].fields.map(child => {
+            (child.name, child.dataType.simpleString, "null")
+          })
+        } else if (currField.dataType.typeName.equalsIgnoreCase(CarbonCommonConstants.MAP)) {
+          val children = currField.dataType.asInstanceOf[MapType]
+          if (inputFieldsIterator.hasNext) {
+            val nextField = inputFieldsIterator.next().toLowerCase()
+            // children of map can be only 'key' and 'value'
+            val nextCurrField = nextField match {
+              case "key" => StructField("key", children.keyType)
+              case "value" => StructField("value", children.valueType)
+              case _ => throw handleException(nextField, currField.name, carbonTable.getTableName)
+            }
+            // make the child type the current field to describe further nested types.
+            currField = nextCurrField
+            inputColumn += "." + currField.name
+            break()
+          }
+          // if no further nested column is given, display the field information and its children.
+          results = Seq((inputColumn,
+            currField.dataType.typeName, currField.getComment().getOrElse("null")),
+            ("## Children of " + inputColumn + ":  ", "", ""))
+          results ++= Seq(("key", children.keyType.simpleString, "null"),
+            ("value", children.valueType.simpleString, "null"))
+        } else {
+          results = Seq((inputColumn,
+            currField.dataType.typeName, currField.getComment().getOrElse("null")))
+        }
+      }
+    }
+    results.map { case (c1, c2, c3) => Row(c1, c2, c3) }
+  }
+
+  def handleException(nextField: String, currField: String, tableName: String): Throwable = {
+    new MalformedCarbonCommandException(
+      s"$nextField is an invalid child name for column $currField " +
+      s"of table: $tableName")
+  }
+
+  override protected def opName: String = "DESC COLUMN"
+}
+
+case class CarbonDescribeShortCommand(
+    databaseNameOp: Option[String],
+    tableName: String)
+  extends MetadataCommand {
+
+  override val output: Seq[Attribute] = Seq(
+    // Column names are based on Hive.
+    AttributeReference("col_name", StringType, nullable = false,
+      new MetadataBuilder().putString("comment", "name of the column").build())(),
+    AttributeReference("data_type", StringType, nullable = false,
+      new MetadataBuilder().putString("comment", "data type of the column").build())(),
+    AttributeReference("comment", StringType, nullable = true,
+      new MetadataBuilder().putString("comment", "comment of the column").build())()
+  )
+
+  override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
+    val (tableSchema, carbonTable) = Checker.getSchemaAndTable(sparkSession, databaseNameOp,
+      tableName)
+    setAuditTable(carbonTable)
+    var results = Seq[(String, String, String)]()
+    results = tableSchema.map { field =>
+      val colComment = field.getComment().getOrElse("null")
+      var datatypeName = field.dataType.typeName
+      if (field.dataType.isInstanceOf[ArrayType] || field.dataType.isInstanceOf[StructType] ||
+         field.dataType.isInstanceOf[MapType]) {
+        datatypeName += "<..>"
+      }
+      (field.name, datatypeName, colComment)
+    }
+    results.map { case (c1, c2, c3) => Row(c1, c2, c3) }
+  }
+
+  override protected def opName: String = "DESC SHORT"
+}
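
To make the traversal above easier to follow outside the patch context, here is a minimal
standalone sketch of the same dot-path walk, written against stock Spark types only. The
object and method names are hypothetical (not part of this commit); it mirrors the
array 'item' / map 'key'/'value' / struct field-name conventions used by
CarbonDescribeColumnCommand.

```scala
// Hypothetical sketch, not part of this patch: resolve a dotted path such as
// "locationinfo.item.activecity" against a Spark schema and return the
// resolved field plus its direct children, as (name, data_type) pairs.
import org.apache.spark.sql.types._

object DescribeColumnSketch {
  def describe(schema: StructType, path: Seq[String]): Seq[(String, String)] = {
    require(path.nonEmpty, "path must name at least the top-level column")
    // resolve the top-level column, then walk the remaining nested steps
    var current: DataType = schema
      .find(_.name.equalsIgnoreCase(path.head))
      .getOrElse(throw new IllegalArgumentException(s"${path.head}: no such column"))
      .dataType
    for (step <- path.tail) {
      current = (current, step.toLowerCase) match {
        case (a: ArrayType, "item") => a.elementType // child of an array can be only 'item'
        case (m: MapType, "key")    => m.keyType     // children of a map are 'key' and 'value'
        case (m: MapType, "value")  => m.valueType
        case (s: StructType, name)  =>               // struct children are its field names
          s.fields.find(_.name.equalsIgnoreCase(name))
            .getOrElse(throw new IllegalArgumentException(s"$step: invalid child name"))
            .dataType
        case _ =>
          throw new IllegalArgumentException(s"$step: invalid child name")
      }
    }
    // first row describes the resolved field itself, remaining rows its children
    val children = current match {
      case a: ArrayType  => Seq("item" -> a.elementType.simpleString)
      case m: MapType    => Seq("key" -> m.keyType.simpleString,
                                "value" -> m.valueType.simpleString)
      case s: StructType => s.fields.toSeq.map(f => f.name -> f.dataType.simpleString)
      case _             => Nil
    }
    (path.mkString("."), current.typeName) +: children
  }
}
```

On the docs' example schema, describe(schema, Seq("locationinfo", "item")) yields the same
rows shown in the DESCRIBE COLUMN output above.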
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
index d16170f..b77f16f 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.parser
 
+import scala.collection.JavaConverters.seqAsJavaListConverter
 import scala.collection.mutable
 import scala.language.implicitConversions
 
@@ -33,7 +34,7 @@ import org.apache.spark.sql.execution.command.index.{CarbonCreateIndexCommand, C
 import org.apache.spark.sql.execution.command.management._
 import org.apache.spark.sql.execution.command.schema.CarbonAlterTableDropColumnCommand
 import org.apache.spark.sql.execution.command.stream.{CarbonCreateStreamCommand, CarbonDropStreamCommand, CarbonShowStreamsCommand}
-import org.apache.spark.sql.execution.command.table.CarbonCreateTableCommand
+import org.apache.spark.sql.execution.command.table.{CarbonCreateTableCommand, CarbonDescribeColumnCommand, CarbonDescribeShortCommand}
 import org.apache.spark.sql.execution.command.view.{CarbonCreateMVCommand, CarbonDropMVCommand, CarbonRefreshMVCommand, CarbonShowMVCommand}
 import org.apache.spark.sql.secondaryindex.command.{CarbonCreateSecondaryIndexCommand, _}
 import org.apache.spark.sql.types.StructField
@@ -73,7 +74,7 @@ class CarbonSpark2SqlParser extends CarbonDDLSqlParser {
   protected lazy val startCommand: Parser[LogicalPlan] =
     segmentManagement | alterTable | restructure | updateTable | deleteRecords |
     alterTableFinishStreaming | stream | cli |
-    cacheManagement | insertStageData | indexCommands | mvCommands
+    cacheManagement | insertStageData | indexCommands | mvCommands | describeCommands
 
   protected lazy val segmentManagement: Parser[LogicalPlan] =
     deleteSegmentByID | deleteSegmentByLoadDate | deleteStage | cleanFiles | addSegment |
@@ -98,6 +99,8 @@ class CarbonSpark2SqlParser extends CarbonDDLSqlParser {
     createIndex | dropIndex | showIndexes | registerIndexes | refreshIndex | repairIndex |
       repairIndexDatabase
 
+  protected lazy val describeCommands: Parser[LogicalPlan] = describeColumn | describeShort
+
   protected lazy val alterTable: Parser[LogicalPlan] =
     ALTER ~> TABLE ~> (ident <~ ".").? ~ ident ~ (COMPACT ~ stringLit) ~
      (WHERE ~> (SEGMENT ~ "." ~ ID) ~> IN ~> "(" ~> repsep(segmentId, ",") <~ ")").? <~
@@ -109,6 +112,25 @@ class CarbonSpark2SqlParser extends CarbonDDLSqlParser {
     }
 
   /**
+   * describe complex column of table
+   */
+  protected lazy val describeColumn: Parser[LogicalPlan] =
+    (DESCRIBE | DESC) ~> COLUMN ~> repsep(ident, ".") ~ ontable <~ opt(";") ^^ {
+      case fields ~ table =>
+        CarbonDescribeColumnCommand(
+          table.database, table.table, fields.asJava)
+    }
+
+  /**
+   * describe short version of table complex columns
+   */
+  protected lazy val describeShort: Parser[LogicalPlan] =
+    (DESCRIBE | DESC) ~> SHORT ~> (ident <~ ".").? ~ ident <~ opt(";") ^^ {
+      case dbName ~ table =>
+        CarbonDescribeShortCommand(dbName, table)
+    }
+
+  /**
    * The below syntax is used to change the status of the segment
    * from "streaming" to "streaming finish".
    * ALTER TABLE tableName FINISH STREAMING
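
As an aside on the grammar shape used by describeColumn above: repsep(ident, ".") is what
lets a dotted path like locationinfo.item.activecity arrive as the inputFieldNames list
consumed by CarbonDescribeColumnCommand. A self-contained sketch with plain
scala-parser-combinators follows (assuming that module is on the classpath; Carbon's actual
rule builds on its own keyword parsers and ontable, and returns a command, not a tuple).

```scala
// Hypothetical sketch, not Carbon's parser: accepts
// "DESCRIBE|DESC COLUMN a.b.c ON [db.]table [;]".
import scala.util.parsing.combinator.RegexParsers

object DescribeGrammarSketch extends RegexParsers {
  private val ident: Parser[String] = "[a-zA-Z_][a-zA-Z0-9_]*".r

  val describeColumn: Parser[(List[String], Option[String], String)] =
    ("(?i)(DESCRIBE|DESC)".r ~> "(?i)COLUMN".r ~> repsep(ident, ".")) ~
      ("(?i)ON".r ~> opt(ident <~ ".") ~ ident) <~ opt(";") ^^ {
        case fields ~ (db ~ table) => (fields, db, table)
      }

  def main(args: Array[String]): Unit = {
    // Success((List(locationinfo, item),Some(db1),complexcarbontable), ...)
    println(parseAll(describeColumn, "DESC COLUMN locationinfo.item ON db1.complexcarbontable;"))
  }
}
```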
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/describeTable/TestDescribeTable.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/describeTable/TestDescribeTable.scala
index 8526af9..8b9bbc9 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/describeTable/TestDescribeTable.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/describeTable/TestDescribeTable.scala
@@ -17,9 +17,11 @@
 package org.apache.carbondata.spark.testsuite.describeTable
 
 import org.apache.spark.sql.Row
+import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.common.exceptions.sql.{MalformedCarbonCommandException, MalformedIndexCommandException}
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 
@@ -40,6 +42,18 @@ class TestDescribeTable extends QueryTest with BeforeAndAfterAll {
         "Dec2Col1 BigInt, Dec2Col2 String, Dec2Col3 Bigint, Dec2Col4 Decimal) STORED AS carbondata")
     sql("CREATE TABLE Desc2(" +
         "Dec2Col1 BigInt, Dec2Col2 String, Dec2Col3 Bigint, Dec2Col4 Decimal) STORED AS carbondata")
+    sql("drop table if exists complexcarbontable")
+    sql("create table complexcarbontable(deviceInformationId int, " +
+        "channelsId map<string,string>, mobile struct<imei:string, imsi:string>," +
+        "MAC array<string> COMMENT 'this is an array'," +
+        "locationinfo array<struct<ActiveAreaId:int, ActiveCountry:string, " +
+        "ActiveProvince:string, Activecity:map<string,string>, " +
+        "ActiveDistrict:string, ActiveStreet:array<string>>>," +
+        "proddate struct<productionDate:string,activeDeactivedate:array<string>>, gamePointId " +
+        "double,contractNumber struct<num:double,contract:map<string,array<string>>>," +
+        "decimalColumn map<string,struct<d:decimal(10,3), s:struct<im:string>>>) " +
+        "STORED AS carbondata"
+    )
   }
 
   test("test describe table") {
@@ -72,11 +86,144 @@ class TestDescribeTable extends QueryTest with BeforeAndAfterAll {
     assert(descPar.exists(_.toString().contains("Partition Parameters:")))
   }
 
+  test("test describe column field name") {
+    // describe primitive column
+    var desc = sql("describe column deviceInformationId on complexcarbontable").collect()
+    assert(desc(0).get(0).asInstanceOf[String].trim.equals("deviceInformationId"))
+    assert(desc(0).get(1).asInstanceOf[String].trim.equals("integer"))
+
+    // describe simple map
+    /*
+    +----------------------------+---------+-------+
+    |col_name                    |data_type|comment|
+    +----------------------------+---------+-------+
+    |channelsId                  |map      |null   |
+    |## Children of channelsId:  |         |       |
+    |key                         |string   |null   |
+    |value                       |string   |null   |
+    +----------------------------+---------+-------+
+    */
+    desc = sql("desc column channelsId on complexcarbontable").collect()
+    assert(desc(2).get(0).asInstanceOf[String].trim.equals("key"))
+    assert(desc(2).get(1).asInstanceOf[String].trim.equals("string"))
+    assert(desc(3).get(0).asInstanceOf[String].trim.equals("value"))
+    assert(desc(3).get(1).asInstanceOf[String].trim.equals("string"))
+
+    // describe struct
+    desc = sql("describe column mobile on complexcarbontable").collect()
+    assert(desc(2).get(0).asInstanceOf[String].trim.equals("imei"))
+    assert(desc(3).get(0).asInstanceOf[String].trim.equals("imsi"))
+
+    // describe array
+    desc = sql("describe column MAC on complexcarbontable").collect()
+    assert(desc(0).get(2).asInstanceOf[String].trim.equals("this is an array"))
+    assert(desc(2).get(0).asInstanceOf[String].trim.equals("item"))
+    assert(desc(2).get(1).asInstanceOf[String].trim.equals("string"))
+
+    // describe struct<string,array<string>>
+    desc = sql("describe column proddate on complexcarbontable").collect()
+    assert(desc(2).get(0).asInstanceOf[String].trim.equals("productiondate"))
+    assert(desc(2).get(1).asInstanceOf[String].trim.equals("string"))
+    assert(desc(3).get(0).asInstanceOf[String].trim.equals("activedeactivedate"))
+    assert(desc(3).get(1).asInstanceOf[String].trim.equals("array<string>"))
+
+    desc = sql("describe column proddate.activeDeactivedate on complexcarbontable").collect()
+    assert(desc(2).get(0).asInstanceOf[String].trim.equals("item"))
+    assert(desc(2).get(1).asInstanceOf[String].trim.equals("string"))
+
+    // describe array<struct<int,string,string,map<string,string>,string,array<string>>>
+    desc = sql("describe column locationinfo on complexcarbontable").collect()
+    assert(desc(2).get(0).asInstanceOf[String].trim.equals("item"))
+    assert(desc(2).get(1).asInstanceOf[String].trim.equals(
+        "struct<activeareaid:int,activecountry:string,activeprovince:string," +
+        "activecity:map<string,string>,activedistrict:string,activestreet:array<string>>"))
+
+    desc = sql("describe column locationinfo.item on complexcarbontable").collect()
+    assert(desc(5).get(0).asInstanceOf[String].trim.equals("activecity"))
+    assert(desc(5).get(1).asInstanceOf[String].trim.equals("map<string,string>"))
+    assert(desc(7).get(0).asInstanceOf[String].trim.equals("activestreet"))
+    assert(desc(7).get(1).asInstanceOf[String].trim.equals("array<string>"))
+
+    desc = sql("describe column locationinfo.item.Activecity on complexcarbontable").collect()
+    assert(desc(2).get(0).asInstanceOf[String].trim.equals("key"))
+    assert(desc(3).get(0).asInstanceOf[String].trim.equals("value"))
+
+    desc = sql("describe column locationinfo.item.ActiveStreet on complexcarbontable").collect()
+    assert(desc(2).get(0).asInstanceOf[String].trim.equals("item"))
+    assert(desc(2).get(1).asInstanceOf[String].trim.equals("string"))
+
+    // describe struct<double,map<string,array<string>>>
+    desc = sql("describe column contractNumber on complexcarbontable").collect()
+    assert(desc(2).get(0).asInstanceOf[String].trim.equals("num"))
+    assert(desc(2).get(1).asInstanceOf[String].trim.equals("double"))
+    assert(desc(3).get(0).asInstanceOf[String].trim.equals("contract"))
+    assert(desc(3).get(1).asInstanceOf[String].trim.equals("map<string,array<string>>"))
+
+    desc = sql("describe column contractNumber.contract on complexcarbontable").collect()
+    assert(desc(3).get(0).asInstanceOf[String].trim.equals("value"))
+    assert(desc(3).get(1).asInstanceOf[String].trim.equals("array<string>"))
+
+    desc = sql("describe column contractNumber.contract.value on complexcarbontable").collect()
+    assert(desc(2).get(0).asInstanceOf[String].trim.equals("item"))
+    assert(desc(2).get(1).asInstanceOf[String].trim.equals("string"))
+
+    // describe map<string,struct<decimal(10,3),struct<string>>>
+    desc = sql("describe column decimalcolumn on complexcarbontable").collect()
+    assert(desc(3).get(0).asInstanceOf[String].trim.equals("value"))
+    assert(desc(3).get(1).asInstanceOf[String].trim
+      .equals("struct<d:decimal(10,3),s:struct<im:string>>"))
+
+    desc = sql("describe column decimalcolumn.value on complexcarbontable").collect()
+    assert(desc(3).get(0).asInstanceOf[String].trim.equals("s"))
+    assert(desc(3).get(1).asInstanceOf[String].trim.equals("struct<im:string>"))
+
+    desc = sql("describe column decimalcolumn.value.s on complexcarbontable").collect()
+    assert(desc(2).get(0).asInstanceOf[String].trim.equals("im"))
+    assert(desc(2).get(1).asInstanceOf[String].trim.equals("string"))
+  }
+
+  test("test describe with invalid table and field names") {
+    // describe column with invalid table name
+    var exception1 = intercept[NoSuchTableException](sql(
+      "describe column decimalcolumn on invalidTable"))
+    assert(exception1.getMessage.contains(
+      "Table or view 'invalidTable' not found in database 'default'"))
+
+    // describe short with invalid table name
+    exception1 = intercept[NoSuchTableException](sql(
+      "describe short invalidTable"))
+    assert(exception1.getMessage.contains(
+      "Table or view 'invalidTable' not found in database 'default'"))
+
+    // describe column with invalid field name
+    var exception2 = intercept[MalformedCarbonCommandException](sql(
+      "describe column invalidField on complexcarbontable"))
+    assert(exception2.getMessage.contains(
+      "Column invalidField does not exist in the table default.complexcarbontable"))
+
+    // describe column with invalid child names
+    exception2 = intercept[MalformedCarbonCommandException](sql(
+      "describe column MAC.one on complexcarbontable"))
+    assert(exception2.getMessage.contains(
+      "one is an invalid child name for column mac of table: complexcarbontable"))
+  }
+
+  test("test describe short table format") {
+    val desc = sql("desc short complexcarbontable").collect()
+    assert(desc(1).get(0).asInstanceOf[String].trim.equals("channelsid"))
+    assert(desc(1).get(1).asInstanceOf[String].trim.equals("map<..>"))
+    assert(desc(2).get(0).asInstanceOf[String].trim.equals("mobile"))
+    assert(desc(2).get(1).asInstanceOf[String].trim.equals("struct<..>"))
+    assert(desc(3).get(0).asInstanceOf[String].trim.equals("mac"))
+    assert(desc(3).get(1).asInstanceOf[String].trim.equals("array<..>"))
+  }
+
   override def afterAll: Unit = {
     sql("DROP TABLE Desc1")
     sql("DROP TABLE Desc2")
     sql("drop table if exists a")
     sql("drop table if exists b")
+    sql("drop table if exists complexcarbontable")
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.COMPRESSOR,
       CarbonCommonConstants.DEFAULT_COMPRESSOR)
   }
