This is an automated email from the ASF dual-hosted git repository.

jackylk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new 3b85e9f  Revert "wip"
3b85e9f is described below

commit 3b85e9f1c6c4d13da80b8fb6094461d9a1f404eb
Author: Jacky Li <jacky.li...@qq.com>
AuthorDate: Mon Dec 30 09:36:00 2019 +0800

    Revert "wip"
    
    This reverts commit 32bd37fe082daa413ae7a80c9bcde7e859a5df67.
---
 .../carbondata/mv/rewrite/TestAllOperationsOnMV.scala |  2 +-
 .../complexType/TestCreateTableWithDouble.scala       |  3 ++-
 .../createTable/TestCreateTableAsSelect.scala         | 19 +++++++++++++++++++
 .../spark/sql/parser/CarbonSparkSqlParserUtil.scala   |  6 ------
 4 files changed, 22 insertions(+), 8 deletions(-)

diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
index 1750ce7..19170c5 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
@@ -392,7 +392,7 @@ class TestAllOperationsOnMV extends QueryTest with BeforeAndAfterEach {
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm ")
     intercept[MalformedCarbonCommandException] {
-      sql("create datamap dm using 'mv' dmproperties('sort_columns'='name') as 
select name from maintable")
+      sql("create datamap dm using 'mv' 
dmproperties('dictionary_include'='name', 'sort_columns'='name') as select name 
from maintable")
     }.getMessage.contains("DMProperties dictionary_include,sort_columns are 
not allowed for this datamap")
   }
 
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCreateTableWithDouble.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCreateTableWithDouble.scala
index e46e3ba..f08aa20 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCreateTableWithDouble.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCreateTableWithDouble.scala
@@ -64,7 +64,8 @@ class TestCreateTableWithDouble extends QueryTest with BeforeAndAfterAll {
     try {
       sql("CREATE TABLE doubleComplex2 (Id int, number double, name string, " +
         "gamePoint array<double>, mac struct<num:double>) " +
-        "STORED BY 'org.apache.carbondata.format' ")
+        "STORED BY 'org.apache.carbondata.format' " +
+        "TBLPROPERTIES('DICTIONARY_INCLUDE'='number,gamePoint,mac')")
       sql(s"LOAD DATA LOCAL INPATH '$dataPath' INTO TABLE doubleComplex2")
       countNum = sql(s"SELECT COUNT(*) FROM doubleComplex2").collect
       doubleField = sql(s"SELECT number FROM doubleComplex2 SORT BY Id").collect
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableAsSelect.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableAsSelect.scala
index 7591cd0..8e4d8fa 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableAsSelect.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableAsSelect.scala
@@ -145,6 +145,25 @@ class TestCreateTableAsSelect extends QueryTest with BeforeAndAfterAll {
     checkAnswer(sql("select * from ctas_select_direct_data"), Seq(Row(300, 
"carbondata")))
   }
 
+  test("test create table as select with TBLPROPERTIES") {
+    sql("DROP TABLE IF EXISTS ctas_tblproperties_testt")
+    sql(
+      "create table ctas_tblproperties_testt stored by 'carbondata' 
TBLPROPERTIES" +
+        "('DICTIONARY_INCLUDE'='key', 'sort_scope'='global_sort') as select * 
from carbon_ctas_test")
+    checkAnswer(sql("select * from ctas_tblproperties_testt"), sql("select * 
from carbon_ctas_test"))
+    val carbonTable = 
CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetaStore
+      .lookupRelation(Option("default"), 
"ctas_tblproperties_testt")(Spark2TestQueryExecutor.spark)
+      .asInstanceOf[CarbonRelation].carbonTable
+    val metadataFolderPath: CarbonFile = FileFactory.getCarbonFile(carbonTable.getMetadataPath)
+    assert(metadataFolderPath.exists())
+    val dictFiles: Array[CarbonFile] = metadataFolderPath.listFiles(new CarbonFileFilter {
+      override def accept(file: CarbonFile): Boolean = {
+        file.getName.contains(".dict") || file.getName.contains(".sortindex")
+      }
+    })
+    assert(dictFiles.length == 3)
+  }
+
   test("test create table as select with column name as tupleid") {
     intercept[Exception] {
       sql("create table t2 stored by 'carbondata' as select count(value) AS 
tupleid from carbon_ctas_test")
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
index 92c1283..60e5cad 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
@@ -30,7 +30,6 @@ import org.apache.spark.sql.execution.command.{PartitionerField, TableModel, Tab
 import org.apache.spark.sql.execution.command.table.{CarbonCreateTableAsSelectCommand, CarbonCreateTableCommand}
 import org.apache.spark.sql.types.StructField
 
-import org.apache.carbondata.common.exceptions.DeprecatedFeatureException
 import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.impl.FileFactory
@@ -136,11 +135,6 @@ object CarbonSparkSqlParserUtil {
     }
 
     // validate tblProperties
-    if (tableProperties.contains(CarbonCommonConstants.DICTIONARY_INCLUDE) ||
-        tableProperties.contains(CarbonCommonConstants.DICTIONARY_EXCLUDE)) {
-      throw new DeprecatedFeatureException("global dictionary")
-    }
-
     val bucketFields = parser.getBucketFields(tableProperties, fields, options)
     var isTransactionalTable: Boolean = true
 
