Repository: carbondata
Updated Branches:
  refs/heads/master 8b83f5885 -> 36ceb59f0


http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnExcudeDictTestCase.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnExcudeDictTestCase.scala
 
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnExcudeDictTestCase.scala
new file mode 100644
index 0000000..6f9df82
--- /dev/null
+++ 
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnExcudeDictTestCase.scala
@@ -0,0 +1,433 @@
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.cluster.sdv.generated
+
+import org.apache.spark.sql.common.util._
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+/**
+ * Test Class for SortColumnExcudeDictTestCase to verify all scenarios
+ */
+
+class SortColumnExcudeDictTestCase extends QueryTest with BeforeAndAfterAll {
+
+
+  //create table with no dictionary sort_columns
+  test("Sortcolumn-001_TC001", Include) {
+    sql(s"""drop table if exists sorttable1""").collect
+    sql(s"""CREATE TABLE sorttable1 (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno','sort_columns'='empno')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select empno from sorttable1""").collect
+
+    sql(s"""drop table if exists sorttable1""").collect
+  }
+
+
+  //create table with direct-dictionary sort_columns
+  test("Sortcolumn-001_TC003", Include) {
+    sql(s"""CREATE TABLE sorttable3 (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_include'='doj','sort_columns'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable3 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable3""").collect
+
+    sql(s"""drop table if exists sorttable3""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with offheap safe
+  test("Sortcolumn-001_TC004", Include) {
+    sql(s"""CREATE TABLE sorttable4_offheap_safe (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) 
STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='workgroupcategory','sort_columns'='workgroupcategory,
 empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable4_offheap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select workgroupcategory, empname from 
sorttable4_offheap_safe""").collect
+
+    sql(s"""drop table if exists sorttable4_offheap_safe""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with offheap and 
unsafe sort
+  test("Sortcolumn-001_TC005", Include) {
+    sql(s"""CREATE TABLE sorttable4_offheap_unsafe (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) 
STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='workgroupcategory','sort_columns'='workgroupcategory,
 empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable4_offheap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select workgroupcategory, empname from 
sorttable4_offheap_unsafe""").collect
+
+    sql(s"""drop table if exists sorttable4_offheap_unsafe""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with offheap and 
inmemory sort
+  test("Sortcolumn-001_TC006", Include) {
+    sql(s"""CREATE TABLE sorttable4_offheap_inmemory (empno int, empname 
String, designation String, doj Timestamp, workgroupcategory int, 
workgroupcategoryname String, deptno int, deptname String, projectcode int, 
projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization 
int,salary int) STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='workgroupcategory','sort_columns'='workgroupcategory,
 empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable4_offheap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select workgroupcategory, empname from 
sorttable4_offheap_inmemory""").collect
+
+    sql(s"""drop table if exists sorttable4_offheap_inmemory""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with heap
+  test("Sortcolumn-001_TC007", Include) {
+    sql(s"""CREATE TABLE sorttable4_heap_safe (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) 
STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='workgroupcategory','sort_columns'='workgroupcategory,
 empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable4_heap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select workgroupcategory, empname from 
sorttable4_heap_safe""").collect
+
+    sql(s"""drop table if exists sorttable4_heap_safe""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with heap and unsafe 
sort
+  test("Sortcolumn-001_TC008", Include) {
+    sql(s"""CREATE TABLE sorttable4_heap_unsafe (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) 
STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='workgroupcategory','sort_columns'='workgroupcategory,
 empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable4_heap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select workgroupcategory, empname from 
sorttable4_heap_unsafe""").collect
+
+    sql(s"""drop table if exists sorttable4_heap_unsafe""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with heap and 
inmemory sort
+  test("Sortcolumn-001_TC009", Include) {
+    sql(s"""CREATE TABLE sorttable4_heap_inmemory (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) 
STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='workgroupcategory','sort_columns'='workgroupcategory,
 empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable4_heap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select workgroupcategory, empname from 
sorttable4_heap_inmemory""").collect
+
+    sql(s"""drop table if exists sorttable4_heap_inmemory""").collect
+  }
+
+
+  //create table with no-dictionary sort_columns and test minor compaction
+  test("Sortcolumn-001_TC010", Include) {
+    sql(s"""drop table if exists origintable2""").collect
+    sql(s"""drop table if exists sorttable5""").collect
+    sql(s"""CREATE TABLE origintable2 (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format'""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table origintable2 compact 'minor'""").collect
+    sql(s"""CREATE TABLE sorttable5 (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno','sort_columns'='empno')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table sorttable5 compact 'minor'""").collect
+    sql(s"""select empno from sorttable5""").collect
+
+    sql(s"""drop table if exists sorttable5""").collect
+  }
+
+
+  //filter on sort_columns include no-dictionary
+  test("Sortcolumn-001_TC011", Include) {
+    sql(s"""drop table if exists sorttable6""").collect
+    sql(s"""CREATE TABLE sorttable6 (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='workgroupcategory','dictionary_include'='doj','sort_columns'='workgroupcategory,
 doj, empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable6 where workgroupcategory = 1""").collect
+
+    sql(s"""drop table if exists sorttable6""").collect
+  }
+
+
+  //filter on sort_columns include direct-dictionary
+  test("Sortcolumn-001_TC012", Include) {
+    sql(s"""CREATE TABLE sorttable6 (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='workgroupcategory','dictionary_include'='doj','sort_columns'='workgroupcategory,
 doj, empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable6 where doj = '2007-01-17 
00:00:00'""").collect
+
+    sql(s"""drop table if exists sorttable6""").collect
+  }
+
+
+  //filter on sort_columns include dictionary
+  test("Sortcolumn-001_TC013", Include) {
+    sql(s"""drop table if exists sorttable6""").collect
+    sql(s"""CREATE TABLE sorttable6 (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='workgroupcategory','dictionary_include'='doj','sort_columns'='workgroupcategory,
 doj, empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable6 where empname = 'madhan'""").collect
+
+    sql(s"""drop table if exists sorttable6""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with heap and safe sort config
+  test("Sortcolumn-001_TC014", Include) {
+    sql(s"""CREATE TABLE unsortedtable_heap_safe (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) 
STORED BY 'org.apache.carbondata.format' 
tblproperties('sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE unsortedtable_heap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select * from unsortedtable_heap_safe where empno = 11""").collect
+
+    sql(s"""drop table if exists unsortedtable_heap_safe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with heap and safe sort config 
with order by
+  test("Sortcolumn-001_TC015", Include) {
+    sql(s"""drop table if exists unsortedtable_heap_safe""").collect
+    sql(s"""CREATE TABLE unsortedtable_heap_safe (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) 
STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE unsortedtable_heap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select * from unsortedtable_heap_safe order by empno""").collect
+
+    sql(s"""drop table if exists unsortedtable_heap_safe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with heap and unsafe sort 
config
+  test("Sortcolumn-001_TC016", Include) {
+    sql(s"""CREATE TABLE unsortedtable_heap_unsafe (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) 
STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE unsortedtable_heap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select * from unsortedtable_heap_unsafe where empno = 
11""").collect
+
+    sql(s"""drop table if exists unsortedtable_heap_unsafe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with heap and unsafe sort 
config with order by
+  test("Sortcolumn-001_TC017", Include) {
+    sql(s"""drop table if exists unsortedtable_heap_unsafe""").collect
+    sql(s"""CREATE TABLE unsortedtable_heap_unsafe (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) 
STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE unsortedtable_heap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select * from unsortedtable_heap_unsafe order by empno""").collect
+
+    sql(s"""drop table if exists unsortedtable_heap_unsafe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and safe sort 
config
+  test("Sortcolumn-001_TC018", Include) {
+    sql(s"""CREATE TABLE unsortedtable_offheap_safe (empno int, empname 
String, designation String, doj Timestamp, workgroupcategory int, 
workgroupcategoryname String, deptno int, deptname String, projectcode int, 
projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization 
int,salary int) STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE unsortedtable_offheap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_safe where empno = 
11""").collect
+
+    sql(s"""drop table if exists unsortedtable_offheap_safe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and safe sort 
config with order by
+  test("Sortcolumn-001_TC019", Include) {
+    sql(s"""CREATE TABLE unsortedtable_offheap_safe (empno int, empname 
String, designation String, doj Timestamp, workgroupcategory int, 
workgroupcategoryname String, deptno int, deptname String, projectcode int, 
projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization 
int,salary int) STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE unsortedtable_offheap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_safe order by empno""").collect
+
+    sql(s"""drop table if exists unsortedtable_offheap_safe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and unsafe 
sort config
+  test("Sortcolumn-001_TC020", Include) {
+    sql(s"""CREATE TABLE unsortedtable_offheap_unsafe (empno int, empname 
String, designation String, doj Timestamp, workgroupcategory int, 
workgroupcategoryname String, deptno int, deptname String, projectcode int, 
projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization 
int,salary int) STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE unsortedtable_offheap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_unsafe where empno = 
11""").collect
+
+    sql(s"""drop table if exists unsortedtable_offheap_unsafe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and unsafe 
sort config with order by
+  test("Sortcolumn-001_TC021", Include) {
+    sql(s"""CREATE TABLE unsortedtable_offheap_unsafe (empno int, empname 
String, designation String, doj Timestamp, workgroupcategory int, 
workgroupcategoryname String, deptno int, deptname String, projectcode int, 
projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization 
int,salary int) STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE unsortedtable_offheap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= 
'\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_unsafe order by 
empno""").collect
+
+    sql(s"""drop table if exists unsortedtable_offheap_unsafe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and inmemory 
sort config
+  test("Sortcolumn-001_TC022", Include) {
+    sql(s"""CREATE TABLE unsortedtable_offheap_inmemory (empno int, empname 
String, designation String, doj Timestamp, workgroupcategory int, 
workgroupcategoryname String, deptno int, deptname String, projectcode int, 
projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization 
int,salary int) STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE unsortedtable_offheap_inmemory OPTIONS('DELIMITER'= ',', 
'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_inmemory where empno = 
11""").collect
+
+    sql(s"""drop table if exists unsortedtable_offheap_inmemory""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and inmemory 
sort config with order by
+  test("Sortcolumn-001_TC023", Include) {
+    sql(s"""CREATE TABLE unsortedtable_offheap_inmemory (empno int, empname 
String, designation String, doj Timestamp, workgroupcategory int, 
workgroupcategoryname String, deptno int, deptname String, projectcode int, 
projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization 
int,salary int) STORED BY 'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE unsortedtable_offheap_inmemory OPTIONS('DELIMITER'= ',', 
'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_inmemory order by 
empno""").collect
+
+    sql(s"""drop table if exists unsortedtable_offheap_inmemory""").collect
+  }
+
+
+  //create table with dictionary_exclude sort_columns
+  test("Sortcolumn-001_TC024", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','dictionary_exclude'='empname','sort_columns'='empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with dictionary_include, sort_columns
+  test("Sortcolumn-001_TC025", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='doj','sort_columns'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with dictionary_include, dictionary_exclude sort_columns
+  test("Sortcolumn-001_TC026", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empname,doj','sort_columns'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with alter table and sort_columns with dimension
+  test("Sortcolumn-001_TC027", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_include'='doj','sort_columns'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table sorttable add columns(newField String) 
tblproperties('dictionary_include'='newField')""").collect
+    sql(s"""LOAD DATA local inpath 
'$resourcesPath/Data/sortcolumns/dataString.csv' INTO TABLE sorttable 
OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with alter table and sort_columns with measure
+  test("Sortcolumn-001_TC028", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_include'='doj','sort_columns'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table sorttable add columns(newField Int) 
tblproperties('dictionary_include'='newField')""").collect
+    sql(s"""LOAD DATA local inpath 
'$resourcesPath/Data/sortcolumns/dataInt.csv' INTO TABLE sorttable 
OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with dictionary_include, no_inverted_index and sort_columns
+  test("Sortcolumn-001_TC030", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_include'='doj','sort_columns'='doj','no_inverted_index'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with dictionary_include, no_inverted_index and sort_columns 
with measure
+  test("Sortcolumn-001_TC031", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno','sort_columns'='empno','no_inverted_index'='empno')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //test sort_columns with a different order of column names
+  test("Sortcolumn-001_TC032", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno,workgroupcategory','dictionary_include'='doj','sort_columns'='empname,empno,workgroupcategory,doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //default behavior if sort_columns is not provided
+  test("Sortcolumn-001_TC033", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //test sort_columns with alter table drop column
+  test("Sortcolumn-001_TC035", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_include'='doj','sort_columns'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table sorttable drop columns(doj)""").collect
+    sql(s"""LOAD DATA local inpath 
'$resourcesPath/Data/sortcolumns/dataDrop.csv' INTO TABLE sorttable 
OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //test sort_columns for float data type with alter query
+  test("Sortcolumn-001_TC037", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""drop table if exists sorttable1""").collect
+    sql(s"""CREATE TABLE sorttable1 (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno','sort_columns'='empno')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table sorttable1 add columns(newField Float) 
tblproperties('DICTIONARY_INCLUDE'='newField')""").collect
+    sql(s"""LOAD DATA local inpath 
'$resourcesPath/Data/sortcolumns/dataFloat.csv' INTO TABLE sorttable1 
OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable1""").collect
+
+    sql(s"""drop table if exists sorttable1""").collect
+  }
+
+
+  //test sort_columns for decimal data type with alter query
+  test("Sortcolumn-001_TC038", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""drop table if exists sorttable1""").collect
+    sql(s"""CREATE TABLE sorttable1 (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' 
tblproperties('dictionary_exclude'='empno','sort_columns'='empno')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' 
INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table sorttable1 add columns(newField decimal) 
tblproperties('dictionary_include'='newField')""").collect
+    sql(s"""LOAD DATA local inpath 
'$resourcesPath/Data/sortcolumns/dataDecimal.csv' INTO TABLE sorttable1 
OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable1""").collect
+
+    sql(s"""drop table if exists sorttable1""").collect
+  }
+
+
+  val prop = CarbonProperties.getInstance()
+  val p1 = prop.getProperty("carbon.load.sort.scope", 
CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
+  val p2 = prop.getProperty("enable.unsafe.sort", 
CarbonCommonConstants.ENABLE_UNSAFE_SORT_DEFAULT)
+  val p3 = prop.getProperty("enable.offheap.sort", 
CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT)
+
+  override protected def beforeAll() {
+    // Adding new properties
+    prop.addProperty("carbon.load.sort.scope", "batch_sort")
+    prop.addProperty("enable.unsafe.sort", "true")
+    prop.addProperty("enable.offheap.sort", "true")
+  }
+
+  override def afterAll: Unit = {
+    // Reverting to the old properties
+    prop.addProperty("carbon.load.sort.scope", p1)
+    prop.addProperty("enable.unsafe.sort", p2)
+    prop.addProperty("enable.offheap.sort", p3)
+  }
+}
\ No newline at end of file
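
The per-test comments above mention heap/offheap and safe/unsafe/inmemory sort variants, while the
suite itself only switches "carbon.load.sort.scope", "enable.unsafe.sort" and "enable.offheap.sort"
once in beforeAll/afterAll. A minimal sketch, not part of this commit, of overriding one of those
properties around a single load, using the same CarbonProperties calls the suite already uses:

    val props = CarbonProperties.getInstance()
    // capture the current value so it can be restored afterwards
    val previous = props.getProperty("enable.offheap.sort",
      CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT)
    props.addProperty("enable.offheap.sort", "false")   // force the on-heap sort path
    try {
      sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_heap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
    } finally {
      props.addProperty("enable.offheap.sort", previous)
    }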

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/resources/data_beyond68yrs.csv
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/resources/data_beyond68yrs.csv 
b/integration/spark-common-test/src/test/resources/data_beyond68yrs.csv
new file mode 100644
index 0000000..2188be6
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/data_beyond68yrs.csv
@@ -0,0 +1,11 @@
+empno,empname,designation,doj,workgroupcategory,workgroupcategoryname,deptno,deptname,projectcode,projectjoindate,projectenddate,attendance,utilization,salary
+11,arvind,SE,17-01-2007,1,developer,10,network,928478,17-02-1800,29-11-1900,96,96,5040
+12,krithin,SSE,29-05-2008,1,developer,11,protocol,928378,29-06-1802,30-12-1902,85,95,7124
+13,madhan,TPL,7/7/2009,2,tester,10,network,928478,7/8/2009,30-12-2016,88,99,9054
+14,anandh,SA,29-12-2010,3,manager,11,protocol,928278,29-01-2000,29-06-2016,77,92,11248
+15,ayushi,SSA,9/7/2011,1,developer,12,security,928375,9/12/2011,29-05-2016,99,91,13245
+16,pramod,SE,14-10-2012,1,developer,13,configManagement,928478,14-11-2038,29-12-2041,86,93,5040
+17,gawrav,PL,22-09-2013,2,tester,12,security,928778,22-10-3000,15-11-3002,78,97,9574
+18,sibi,TL,15-08-2014,2,tester,14,Learning,928176,15-09-2014,29-05-2016,84,98,7245
+19,shivani,PL,12/5/2015,1,developer,10,network,928977,12/6/2015,12/11/2016,88,91,11254
+20,bill,PM,1/12/2015,3,manager,14,Learning,928479,1/1/2016,30-11-2016,75,94,13547

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datetype/DateTypeTest.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datetype/DateTypeTest.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datetype/DateTypeTest.scala
index 4f29a28..a71cee1 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datetype/DateTypeTest.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datetype/DateTypeTest.scala
@@ -55,34 +55,29 @@ class DateTypeTest extends QueryTest {
     }
   }
   test("must throw exception for timestamp data type in dictionary_exclude") {
-    try {
-      sql(
-        "create table if not exists Carbon_automation_testtimestamp (imei 
string,doj timestamp," +
-        "deviceInformationId int,MAC string,deviceColor 
string,device_backColor string,modelId " +
-        "string,marketName string,AMSize string,ROMSize string,CUPAudit 
string,CPIClocked string," +
-        "series string,productionDate timestamp,bomCode string,internalModels 
string, " +
-        "deliveryTime string, channelsId string, channelsName string , 
deliveryAreaId string, " +
-        "deliveryCountry string, deliveryProvince string, deliveryCity 
string,deliveryDistrict " +
-        "string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime 
string, " +
-        "ActiveAreaId string, ActiveCountry string, ActiveProvince string, 
Activecity string, " +
-        "ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, 
Active_releaseId " +
-        "string, Active_EMUIVersion string, Active_operaSysVersion string, 
Active_BacVerNumber " +
-        "string, Active_BacFlashVer string, Active_webUIVersion string, 
Active_webUITypeCarrVer " +
-        "string,Active_webTypeDataVerNumber string, Active_operatorsVersion 
string, " +
-        "Active_phonePADPartitionedVersions string, Latest_YEAR int, 
Latest_MONTH int, Latest_DAY" +
-        " int, Latest_HOUR string, Latest_areaId string, Latest_country 
string, Latest_province " +
-        "string, Latest_city string, Latest_district string, Latest_street 
string, " +
-        "Latest_releaseId string, Latest_EMUIVersion string, 
Latest_operaSysVersion string, " +
-        "Latest_BacVerNumber string, Latest_BacFlashVer string, 
Latest_webUIVersion string, " +
-        "Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, " 
+
-        "Latest_operatorsVersion string, Latest_phonePADPartitionedVersions 
string, " +
-        "Latest_operatorId string, gamePointDescription string, gamePointId 
int,contractNumber " +
-        "int) STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('DICTIONARY_EXCLUDE'='doj')")
+    sql(
+      "create table if not exists Carbon_automation_testtimestamp (imei 
string,doj timestamp," +
+      "deviceInformationId int,MAC string,deviceColor string,device_backColor 
string,modelId " +
+      "string,marketName string,AMSize string,ROMSize string,CUPAudit 
string,CPIClocked string," +
+      "series string,productionDate timestamp,bomCode string,internalModels 
string, " +
+      "deliveryTime string, channelsId string, channelsName string , 
deliveryAreaId string, " +
+      "deliveryCountry string, deliveryProvince string, deliveryCity 
string,deliveryDistrict " +
+      "string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime 
string, " +
+      "ActiveAreaId string, ActiveCountry string, ActiveProvince string, 
Activecity string, " +
+      "ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, 
Active_releaseId " +
+      "string, Active_EMUIVersion string, Active_operaSysVersion string, 
Active_BacVerNumber " +
+      "string, Active_BacFlashVer string, Active_webUIVersion string, 
Active_webUITypeCarrVer " +
+      "string,Active_webTypeDataVerNumber string, Active_operatorsVersion 
string, " +
+      "Active_phonePADPartitionedVersions string, Latest_YEAR int, 
Latest_MONTH int, Latest_DAY" +
+      " int, Latest_HOUR string, Latest_areaId string, Latest_country string, 
Latest_province " +
+      "string, Latest_city string, Latest_district string, Latest_street 
string, " +
+      "Latest_releaseId string, Latest_EMUIVersion string, 
Latest_operaSysVersion string, " +
+      "Latest_BacVerNumber string, Latest_BacFlashVer string, 
Latest_webUIVersion string, " +
+      "Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, " +
+      "Latest_operatorsVersion string, Latest_phonePADPartitionedVersions 
string, " +
+      "Latest_operatorId string, gamePointDescription string, gamePointId 
int,contractNumber " +
+      "int) STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('DICTIONARY_EXCLUDE'='doj')")
 
-      assert(false)
-    }
-    catch {
-      case exception: MalformedCarbonCommandException => assert(true)
-    }
+    assert(true)
   }
 }
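
With this change a timestamp column listed in DICTIONARY_EXCLUDE is accepted rather than rejected
with MalformedCarbonCommandException, so the test above now simply executes the DDL. A minimal
sketch of the relaxed behaviour (the table and column names here are illustrative, not taken from
the commit):

    sql("drop table if exists ts_exclude_sketch")
    // before this commit the DDL below was expected to throw MalformedCarbonCommandException;
    // it is now expected to succeed
    sql("create table ts_exclude_sketch (id int, doj timestamp) " +
        "STORED BY 'org.apache.carbondata.format' " +
        "TBLPROPERTIES('DICTIONARY_EXCLUDE'='doj')")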

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala
index 157ae6e..ac9325d 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala
@@ -656,6 +656,7 @@ class RangeFilterMyTests extends QueryTest with 
BeforeAndAfterAll {
     sql("drop table if exists NO_DICTIONARY_CARBON_1")
     sql("drop table if exists NO_DICTIONARY_CARBON_2")
     sql("drop table if exists NO_DICTIONARY_HIVE_6")
+    sql("drop table if exists directdictionarytable")
     sql("drop table if exists dictionary_hive_6")
     sql("drop table if exists NO_DICTIONARY_HIVE_7")
     sql("drop table if exists NO_DICTIONARY_CARBON_6")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampNoDictionaryColumnTestCase.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampNoDictionaryColumnTestCase.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampNoDictionaryColumnTestCase.scala
new file mode 100644
index 0000000..e8a465a
--- /dev/null
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampNoDictionaryColumnTestCase.scala
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.spark.testsuite.directdictionary
+
+import java.sql.Timestamp
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.hive.HiveContext
+import org.scalatest.BeforeAndAfterAll
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import 
org.apache.carbondata.core.keygenerator.directdictionary.timestamp.TimeStampGranularityConstants
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.spark.sql.test.util.QueryTest
+
+/**
+ * Test Class for detailed query on timestamp datatypes
+ */
+class TimestampNoDictionaryColumnTestCase extends QueryTest with 
BeforeAndAfterAll {
+
+  override def beforeAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,"dd-MM-yyyy")
+
+    try {
+      sql("drop table if exists timestamp_nodictionary")
+      sql(
+        """
+         CREATE TABLE IF NOT EXISTS timestamp_nodictionary
+        (empno int, empname String, designation String, doj Timestamp, 
workgroupcategory int, workgroupcategoryname String,
+         projectcode int, projectjoindate Timestamp, projectenddate Timestamp, 
attendance int,
+         utilization int, salary Int) STORED BY 
'org.apache.carbondata.format'"""
+      )
+
+      val csvFilePath = s"$resourcesPath/data_beyond68yrs.csv"
+      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE 
timestamp_nodictionary OPTIONS"
+          + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
+    } catch {
+      case x: Throwable => CarbonProperties.getInstance()
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+          CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+    }
+  }
+
+  test("select projectjoindate, projectenddate from timestamp_nodictionary") {
+    checkAnswer(
+      sql("select projectjoindate, projectenddate from 
timestamp_nodictionary"),
+      Seq(Row(Timestamp.valueOf("2000-01-29 00:00:00.0"), 
Timestamp.valueOf("2016-06-29 00:00:00.0")),
+        Row(Timestamp.valueOf("1800-02-17 00:00:00.0"), 
Timestamp.valueOf("1900-11-29 00:00:00.0")),
+        Row(null, Timestamp.valueOf("2016-05-29 00:00:00.0")),
+        Row(null, Timestamp.valueOf("2016-11-30 00:00:00.0")),
+        Row(Timestamp.valueOf("3000-10-22 00:00:00.0"), 
Timestamp.valueOf("3002-11-15 00:00:00.0")),
+        Row(Timestamp.valueOf("1802-06-29 00:00:00.0"), 
Timestamp.valueOf("1902-12-30 00:00:00.0")),
+        Row(null, Timestamp.valueOf("2016-12-30 00:00:00.0")),
+        Row(Timestamp.valueOf("2038-11-14 00:00:00.0"), 
Timestamp.valueOf("2041-12-29 00:00:00.0")),
+        Row(null, null),
+        Row(Timestamp.valueOf("2014-09-15 00:00:00.0"), 
Timestamp.valueOf("2016-05-29 00:00:00.0"))
+      )
+    )
+  }
+
+
+  test("select projectjoindate, projectenddate from timestamp_nodictionary 
where in filter") {
+    checkAnswer(
+      sql("select projectjoindate, projectenddate from timestamp_nodictionary 
where projectjoindate in" +
+          "('1800-02-17 00:00:00','3000-10-22 00:00:00') or projectenddate in 
('1900-11-29 00:00:00'," +
+          "'3002-11-15 00:00:00','2041-12-29 00:00:00')"),
+      Seq(Row(Timestamp.valueOf("1800-02-17 00:00:00.0"), 
Timestamp.valueOf("1900-11-29 00:00:00.0")),
+        Row(Timestamp.valueOf("3000-10-22 00:00:00.0"), 
Timestamp.valueOf("3002-11-15 00:00:00.0")),
+        Row(Timestamp.valueOf("2038-11-14 00:00:00.0"), 
Timestamp.valueOf("2041-12-29 00:00:00.0")))
+    )
+
+  }
+
+
+  override def afterAll {
+    sql("drop table timestamp_nodictionary")
+  }
+}
\ No newline at end of file
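
The resource name data_beyond68yrs.csv hints at why this test exists: a second-granularity
direct-dictionary surrogate stored in a signed 32-bit int can only span roughly 68 years around its
cut-off date, whereas a no-dictionary timestamp column is not bound by that window, which is
presumably how values such as 1800 and 3002 round-trip above. A rough sketch of that arithmetic
(the 32-bit, second-granularity surrogate is an assumption, not code from this commit):

    // roughly 68 years fit into a signed 32-bit count of seconds
    val maxSeconds = Int.MaxValue.toLong             // 2147483647
    val years = maxSeconds / (365.25 * 24 * 3600)    // ≈ 68.05 years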

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
index 8c79398..c0dba74 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
@@ -69,7 +69,7 @@ class TestDDLForPartitionTable  extends QueryTest with 
BeforeAndAfterAll {
         | PARTITIONED BY (doj Timestamp)
         | STORED BY 'org.apache.carbondata.format'
         | TBLPROPERTIES('PARTITION_TYPE'='RANGE',
-        |  'RANGE_INFO'='2017-06-11 00:00:02, 2017-06-13 23:59:59')
+        |  'RANGE_INFO'='2017-06-11 00:00:02, 2017-06-13 23:59:59', 
'DICTIONARY_INCLUDE'='doj')
       """.stripMargin)
 
     val carbonTable = 
CarbonMetadata.getInstance().getCarbonTable("default_rangeTable")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTableWithDefaultProperties.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTableWithDefaultProperties.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTableWithDefaultProperties.scala
index 7359b53..1d660e8 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTableWithDefaultProperties.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTableWithDefaultProperties.scala
@@ -64,7 +64,7 @@ class TestDDLForPartitionTableWithDefaultProperties  extends 
QueryTest with Befo
         | PARTITIONED BY (doj Timestamp)
         | STORED BY 'org.apache.carbondata.format'
         | TBLPROPERTIES('PARTITION_TYPE'='RANGE',
-        |  'RANGE_INFO'='2017-06-11 00:00:02, 2017-06-13 23:59:59')
+        |  'RANGE_INFO'='2017-06-11 00:00:02, 2017-06-13 
23:59:59','DICTIONARY_INCLUDE'='doj')
       """.stripMargin)
 
     val carbonTable = 
CarbonMetadata.getInstance().getCarbonTable("default_rangeTable")
@@ -92,7 +92,8 @@ class TestDDLForPartitionTableWithDefaultProperties  extends 
QueryTest with Befo
         | PARTITIONED BY (projectenddate Timestamp)
         | STORED BY 'org.apache.carbondata.format'
         | TBLPROPERTIES('PARTITION_TYPE'='LIST',
-        |  'LIST_INFO'='2017-06-11 00:00:02, 2017-06-13 23:59:59')
+        |  'LIST_INFO'='2017-06-11 00:00:02, 2017-06-13 23:59:59',
+        |  'DICTIONARY_INCLUDE'='projectenddate')
       """.stripMargin)
     val carbonTable = 
CarbonMetadata.getInstance().getCarbonTable("default_listTable")
     val partitionInfo = 
carbonTable.getPartitionInfo(carbonTable.getFactTableName)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
index 9e4f3b7..6347241 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
@@ -31,6 +31,42 @@ class TestSortColumns extends QueryTest with 
BeforeAndAfterAll {
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE 
origintable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
   }
 
+  test("create table sort columns dictionary include - int") {
+    sql(
+      "CREATE TABLE sortint (empno int, empname String, designation String, 
doj Timestamp, " +
+      "workgroupcategory int, workgroupcategoryname String, deptno int, 
deptname String, " +
+      "projectcode int, projectjoindate Timestamp, projectenddate 
Timestamp,attendance int," +
+      "utilization int,salary int) STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES " +
+      "('dictionary_include' = 'empno', 'sort_columns'='empno')")
+  }
+
+  test("create table sort columns dictionary exclude - int") {
+    sql(
+      "CREATE TABLE sortint1 (empno int, empname String, designation String, 
doj Timestamp, " +
+      "workgroupcategory int, workgroupcategoryname String, deptno int, 
deptname String, " +
+      "projectcode int, projectjoindate Timestamp, projectenddate 
Timestamp,attendance int," +
+      "utilization int,salary int) STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES " +
+      "('dictionary_exclude' = 'empno', 'sort_columns'='empno')")
+  }
+
+  test("create table sort columns dictionary include - bigint") {
+    sql(
+      "CREATE TABLE sortbigint (empno bigint, empname String, designation 
String, doj Timestamp, " +
+      "workgroupcategory int, workgroupcategoryname String, deptno int, 
deptname String, " +
+      "projectcode int, projectjoindate Timestamp, projectenddate 
Timestamp,attendance int," +
+      "utilization int,salary int) STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES " +
+      "('dictionary_include' = 'empno', 'sort_columns'='empno')")
+  }
+
+  test("create table sort columns dictionary exclude - bigint") {
+    sql(
+      "CREATE TABLE sortbigint1 (empno bigint, empname String, designation 
String, doj Timestamp, " +
+      "workgroupcategory int, workgroupcategoryname String, deptno int, 
deptname String, " +
+      "projectcode int, projectjoindate Timestamp, projectenddate 
Timestamp,attendance int," +
+      "utilization int,salary int) STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES " +
+      "('dictionary_exclude' = 'empno', 'sort_columns'='empno')")
+  }
+
   test("create table with no dictionary sort_columns") {
     sql("CREATE TABLE sorttable1 (empno int, empname String, designation 
String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, 
deptno int, deptname String, projectcode int, projectjoindate Timestamp, 
projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 
'org.apache.carbondata.format' tblproperties('sort_columns'='empno')")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE 
sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
@@ -310,11 +346,18 @@ class TestSortColumns extends QueryTest with 
BeforeAndAfterAll {
   }
 
   def dropTable = {
+    sql("drop table if exists sortint")
+    sql("drop table if exists sortint1")
+    sql("drop table if exists sortlong")
+    sql("drop table if exists sortlong1")
+    sql("drop table if exists sortbigint")
+    sql("drop table if exists sortbigint1")
     sql("drop table if exists origintable1")
     sql("drop table if exists origintable2")
     sql("drop table if exists sorttable1")
     sql("drop table if exists sorttableDesc")
     sql("drop table if exists sorttable1a")
+    sql("drop table if exists sorttable1b")
     sql("drop table if exists sorttable2")
     sql("drop table if exists sorttable3")
     sql("drop table if exists sorttable4_offheap_safe")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
index d0309ba..acdec91 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
@@ -619,6 +619,10 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
     fields.foreach { field =>
       if (dictIncludeCols.exists(x => x.equalsIgnoreCase(field.column))) {
         dimFields += field
+      } else if (DataTypeUtil.getDataType(field.dataType.get.toUpperCase) == DataType.TIMESTAMP &&
+                 !dictIncludeCols.exists(x => x.equalsIgnoreCase(field.column))) {
+        noDictionaryDims :+= field.column
+        dimFields += field
       } else if (isDetectAsDimentionDatatype(field.dataType.get)) {
         dimFields += field
         // consider all String cols as noDicitonaryDims by default
@@ -626,8 +630,7 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
           noDictionaryDims :+= field.column
         }
      } else if (sortKeyDimsTmp.exists(x => x.equalsIgnoreCase(field.column)) &&
-                 (dictExcludeCols.exists(x => x.equalsIgnoreCase(field.column)) ||
-                  isDefaultMeasure(field.dataType)) &&
+                 isDefaultMeasure(field.dataType) &&
                  (!field.dataType.get.equalsIgnoreCase("STRING"))) {
        throw new MalformedCarbonCommandException(s"Illegal argument in sort_column.Check if you " +
                                                  s"have included UNSUPPORTED DataType column{${
@@ -710,7 +713,7 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
    * detects whether datatype is part of dictionary_exclude
    */
  def isDataTypeSupportedForDictionary_Exclude(columnDataType: String): Boolean = {
-    val dataTypes = Array("string")
+    val dataTypes = Array("string", "timestamp", "int", "long", "bigint")
     dataTypes.exists(x => x.equalsIgnoreCase(columnDataType))
   }
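[Editorial note, not part of the patch: with the widened check above, DICTIONARY_EXCLUDE is now accepted for string, timestamp, int, long and bigint columns. A minimal sketch in the style of the TestSortColumns additions earlier in this patch, assuming the same QueryTest harness; the table name sortts is hypothetical:

  test("create table sort columns dictionary exclude - timestamp (illustrative)") {
    sql("drop table if exists sortts")
    // timestamp in dictionary_exclude is accepted after this change
    sql(
      "CREATE TABLE sortts (empno int, doj Timestamp) " +
      "STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
      "('dictionary_exclude'='doj', 'sort_columns'='doj')")
    sql("drop table if exists sortts")
  }
]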
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
index f5d69ef..fc20108 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
@@ -299,7 +299,10 @@ class AlterTableColumnSchemaGenerator(
     if (alterTableModel.highCardinalityDims.contains(colName)) {
       encoders.remove(Encoding.DICTIONARY)
     }
-    if (dataType == DataType.TIMESTAMP || dataType == DataType.DATE) {
+    if (dataType == DataType.DATE) {
+      encoders.add(Encoding.DIRECT_DICTIONARY)
+    }
+    if (dataType == DataType.TIMESTAMP && !alterTableModel.highCardinalityDims.contains(colName)) {
       encoders.add(Encoding.DIRECT_DICTIONARY)
     }
     val colPropMap = new java.util.HashMap[String, String]()
@@ -364,7 +367,10 @@ class TableNewProcessor(cm: TableModel) {
     if (highCardinalityDims.contains(colName)) {
       encoders.remove(Encoding.DICTIONARY)
     }
-    if (dataType == DataType.TIMESTAMP || dataType == DataType.DATE) {
+    if (dataType == DataType.DATE) {
+      encoders.add(Encoding.DIRECT_DICTIONARY)
+    }
+    if (dataType == DataType.TIMESTAMP && !highCardinalityDims.contains(colName)) {
       encoders.add(Encoding.DIRECT_DICTIONARY)
     }
     columnSchema.setEncodingList(encoders)
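[Editorial note, not part of the patch: after this change DATE columns always receive DIRECT_DICTIONARY, while TIMESTAMP columns receive it only when they are not flagged as high-cardinality (i.e. not listed in DICTIONARY_EXCLUDE). A hedged sketch of the two resulting table shapes, in the same sql(...) test style used elsewhere in this patch; the table names ts_nodict and ts_dict are hypothetical:

  sql("CREATE TABLE ts_nodict (id int, doj timestamp) STORED BY 'org.apache.carbondata.format' " +
      "TBLPROPERTIES('DICTIONARY_EXCLUDE'='doj')")  // doj becomes a plain no-dictionary dimension
  sql("CREATE TABLE ts_dict (id int, doj timestamp) STORED BY 'org.apache.carbondata.format' " +
      "TBLPROPERTIES('DICTIONARY_INCLUDE'='doj')")  // doj keeps direct-dictionary encoding
]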

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index 130f305..01cd113 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -877,8 +877,7 @@ private[sql] case class DescribeCommandFormatted(
             .append(mapper.writeValueAsString(dimension.getColumnProperties))
             .append(",")
         }
-        if (dimension.hasEncoding(Encoding.DICTIONARY) &&
-            !dimension.hasEncoding(Encoding.DIRECT_DICTIONARY)) {
+        if (dimension.hasEncoding(Encoding.DICTIONARY)) {
           "DICTIONARY, KEY COLUMN" + 
(dimension.hasEncoding(Encoding.INVERTED_INDEX) match {
                       case false => ",NOINVERTEDINDEX"
                       case _ => ""
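[Editorial note, not part of the patch: with the DescribeCommandFormatted change above, any dimension carrying the DICTIONARY encoding, including direct-dictionary timestamp columns, is now reported as "DICTIONARY, KEY COLUMN". A small illustrative sketch; the table name desc_check is hypothetical:

  sql("CREATE TABLE desc_check (id int, doj timestamp) STORED BY 'org.apache.carbondata.format' " +
      "TBLPROPERTIES('DICTIONARY_INCLUDE'='doj')")
  sql("DESCRIBE FORMATTED desc_check").show(100, false)  // doj listed as DICTIONARY, KEY COLUMN
]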

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
index 62713fa..7400839 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
@@ -92,19 +92,12 @@ class TestCreateTableSyntax extends QueryTest with BeforeAndAfterAll {
       }
     }
   }
-    test("test carbon table create with int datatype as dictionary exclude") {
-    try {
-      sql("drop table if exists carbontable")
-      sql("create table carbontable(id int, name string, dept string, mobile 
array<string>, "+
+  test("test carbon table create with int datatype as dictionary exclude") {
+    sql("drop table if exists carbontable")
+    sql("create table carbontable(id int, name string, dept string, mobile 
array<string>, " +
         "country string, salary double) STORED BY 
'org.apache.carbondata.format' " +
         "TBLPROPERTIES('DICTIONARY_EXCLUDE'='id')")
-      assert(false)
-    } catch {
-      case e : MalformedCarbonCommandException => {
-        assert(e.getMessage.equals("DICTIONARY_EXCLUDE is unsupported for int " +
-          "data type column: id"))
-      }
-    }
+    assert(true)
   }
 
   test("test carbon table create with decimal datatype as dictionary exclude") 
{

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala b/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala
index 3646fad..29daac9 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala
+++ b/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala
@@ -109,7 +109,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
         | PARTITIONED BY (logdate Timestamp)
         | STORED BY 'carbondata'
         | TBLPROPERTIES('PARTITION_TYPE'='RANGE',
-        | 'RANGE_INFO'='2014/01/01, 2015/01/01, 2016/01/01')
+        | 'RANGE_INFO'='2014/01/01, 2015/01/01, 2016/01/01', 'DICTIONARY_INCLUDE'='logdate')
       """.stripMargin)
 
     /**
@@ -183,7 +183,8 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
         | PARTITIONED BY (logdate Timestamp)
         | STORED BY 'carbondata'
         | TBLPROPERTIES('PARTITION_TYPE'='RANGE',
-        | 'RANGE_INFO'='2014/01/01, 2015/01/01, 2016/01/01, 2018/01/01')
+        | 'RANGE_INFO'='2014/01/01, 2015/01/01, 2016/01/01, 2018/01/01',
+        | 'DICTIONARY_INCLUDE'='logdate')
       """.stripMargin)
 
     /**
@@ -223,6 +224,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
         | STORED BY 'carbondata'
         | TBLPROPERTIES('PARTITION_TYPE'='RANGE',
         | 'RANGE_INFO'='2014/01/01, 2015/01/01, 2016/01/01, 2018/01/01',
+        | 'DICTIONARY_INCLUDE'='logdate',
         | 'BUCKETNUMBER'='3',
         | 'BUCKETCOLUMNS'='country')
       """.stripMargin)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
index 23270e3..3dab247 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
@@ -19,6 +19,7 @@ package org.apache.spark.carbondata.restructure
 
 import java.io.File
 import java.math.{BigDecimal, RoundingMode}
+import java.sql.Timestamp
 
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.common.util.Spark2QueryTest
@@ -35,6 +36,7 @@ class AlterTableValidationTestCase extends Spark2QueryTest with BeforeAndAfterAl
         new File("./target/test/badRecords").getCanonicalPath)
 
     sql("drop table if exists restructure")
+    sql("drop table if exists table1")
     sql("drop table if exists restructure_test")
     sql("drop table if exists restructure_new")
     sql("drop table if exists restructure_bad")
@@ -83,7 +85,7 @@ class AlterTableValidationTestCase extends Spark2QueryTest with BeforeAndAfterAl
       "('DICTIONARY_EXCLUDE'='nodict', 'DEFAULT.VALUE.NoDict'= 'abcd')")
     checkAnswer(sql("select distinct(nodict) from restructure"), Row("abcd"))
   }
-  test("test add timestamp direct dictionary column") {
+  test("test add timestamp no dictionary column") {
     sql(
       "alter table restructure add columns(tmpstmp timestamp) TBLPROPERTIES 
('DEFAULT.VALUE" +
       ".tmpstmp'= '17-01-2007')")
@@ -91,6 +93,27 @@ class AlterTableValidationTestCase extends Spark2QueryTest with BeforeAndAfterAl
       Row(new java.sql.Timestamp(107, 0, 17, 0, 0, 0, 0)))
     checkExistence(sql("desc restructure"), true, "tmpstmptimestamp")
   }
+
+  test("test add timestamp direct dictionary column") {
+    sql(
+      "alter table restructure add columns(tmpstmp1 timestamp) TBLPROPERTIES 
('DEFAULT.VALUE" +
+      ".tmpstmp1'= '17-01-3007','DICTIONARY_INCLUDE'='tmpstmp1')")
+    checkAnswer(sql("select distinct(tmpstmp1) from restructure"),
+      Row(null))
+    checkExistence(sql("desc restructure"), true, "tmpstmptimestamp")
+  }
+
+  test("test add timestamp column and load as dictionary") {
+    sql("create table table1(name string) stored by 'carbondata'")
+    sql("insert into table1 select 'abc'")
+    sql("alter table table1 add columns(tmpstmp timestamp) TBLPROPERTIES " +
+        "('DEFAULT.VALUE.tmpstmp'='17-01-3007','DICTIONARY_INCLUDE'= 
'tmpstmp')")
+    sql("insert into table1 select 'name','17-01-2007'")
+    checkAnswer(sql("select * from table1"),
+      Seq(Row("abc",null),
+        Row("name",Timestamp.valueOf("2007-01-17 00:00:00.0"))))
+  }
+
   test("test add msr column") {
     sql(
       "alter table restructure add columns(msrField decimal(5,2))TBLPROPERTIES 
('DEFAULT.VALUE" +
@@ -441,6 +464,7 @@ class AlterTableValidationTestCase extends Spark2QueryTest with BeforeAndAfterAl
   }
   override def afterAll {
     sql("DROP TABLE IF EXISTS restructure")
+    sql("drop table if exists table1")
     sql("DROP TABLE IF EXISTS restructure_new")
     sql("DROP TABLE IF EXISTS restructure_test")
     sql("DROP TABLE IF EXISTS restructure_bad")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/processing/src/main/java/org/apache/carbondata/processing/newflow/converter/impl/NonDictionaryFieldConverterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/newflow/converter/impl/NonDictionaryFieldConverterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/newflow/converter/impl/NonDictionaryFieldConverterImpl.java
index 533fc87..4861d78 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/newflow/converter/impl/NonDictionaryFieldConverterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/newflow/converter/impl/NonDictionaryFieldConverterImpl.java
@@ -38,8 +38,11 @@ public class NonDictionaryFieldConverterImpl implements FieldConverter {
 
   private boolean isEmptyBadRecord;
 
+  private DataField dataField;
+
  public NonDictionaryFieldConverterImpl(DataField dataField, String nullformat, int index,
       boolean isEmptyBadRecord) {
+    this.dataField = dataField;
     this.dataType = dataField.getColumn().getDataType();
     this.column = dataField.getColumn();
     this.index = index;
@@ -49,15 +52,19 @@ public class NonDictionaryFieldConverterImpl implements FieldConverter {
 
   @Override public void convert(CarbonRow row, BadRecordLogHolder logHolder) {
     String dimensionValue = row.getString(index);
-    if (dimensionValue == null || dimensionValue.equals(nullformat)) {
-      row.update(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, index);
+    if (null == dimensionValue && column.getDataType() != DataType.STRING) {
+      logHolder.setReason(
+          CarbonDataProcessorUtil.prepareFailureReason(column.getColName(), column.getDataType()));
+      updateWithNullValue(row);
+    } else if (dimensionValue == null || dimensionValue.equals(nullformat)) {
+      updateWithNullValue(row);
     } else {
       try {
-        row.update(
-            DataTypeUtil.getBytesBasedOnDataTypeForNoDictionaryColumn(dimensionValue, dataType),
-            index);
+        row.update(DataTypeUtil
+            .getBytesBasedOnDataTypeForNoDictionaryColumn(dimensionValue, dataType,
+                dataField.getDateFormat()), index);
       } catch (Throwable ex) {
-        if (dimensionValue.length() > 0 || isEmptyBadRecord) {
+        if (dimensionValue.length() > 0 || (dimensionValue.length() == 0 && isEmptyBadRecord)) {
          String message = logHolder.getColumnMessageMap().get(column.getColName());
           if (null == message) {
             message = CarbonDataProcessorUtil
@@ -65,11 +72,19 @@ public class NonDictionaryFieldConverterImpl implements FieldConverter {
             logHolder.getColumnMessageMap().put(column.getColName(), message);
           }
           logHolder.setReason(message);
-          row.update(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, index);
+          updateWithNullValue(row);
         } else {
-          row.update(new byte[0], index);
+          updateWithNullValue(row);
         }
       }
     }
   }
+
+  private void updateWithNullValue(CarbonRow row) {
+    if (dataType == DataType.STRING) {
+      row.update(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, index);
+    } else {
+      row.update(CarbonCommonConstants.EMPTY_BYTE_ARRAY, index);
+    }
+  }
 }
