Repository: carbondata
Updated Branches:
  refs/heads/master 2e6bc66f4 -> 842b9e5f4


[CARBONDATA-2089] Fix test cases that always pass, regardless of whether
the SQL throws an exception

Several tests wrapped a SQL statement in try/catch, calling assert(false)
after the statement and matching `case _: Exception => assert(true)` (or
`case _: Throwable`) in the catch arm. Because ScalaTest's assert(false)
itself throws a TestFailedException, which is a RuntimeException, the catch
arm swallowed it and the test passed even when the SQL did not throw. These
blocks are replaced with intercept[...], which fails the test unless the
expected exception is thrown, and the caught exception's message is now
asserted directly where the old code checked it inside the catch arm.

This closes #1866
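
For reference, a minimal self-contained sketch of the broken pattern and its
replacement. The real suites get sql() from their QueryTest base class; the
sql stub, statement, and message below are illustrative, not from this diff:

import org.scalatest.FunSuite

class InterceptPatternExample extends FunSuite {

  // Stand-in for QueryTest.sql; always fails, to mimic a bad query.
  def sql(statement: String): Unit =
    throw new RuntimeException("Error: Invalid option(s): delimiterrr")

  test("broken pattern: passes even if sql() does not throw") {
    try {
      sql("LOAD DATA ...") // illustrative statement
      assert(false)        // throws TestFailedException, an Exception...
    } catch {
      case _: Exception => assert(true) // ...which this arm swallows
    }
  }

  test("fixed pattern: fails unless the expected exception is thrown") {
    val e = intercept[Exception] {
      sql("LOAD DATA ...")
    }
    assert(e.getMessage.contains("Invalid option(s)"))
  }
}

intercept also returns the caught exception, which is why several suites in
this diff now assert on getMessage right after the block instead of inside a
catch arm.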


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/842b9e5f
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/842b9e5f
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/842b9e5f

Branch: refs/heads/master
Commit: 842b9e5f4bf524c08b3ad4f17d93d45a21cb1912
Parents: 2e6bc66
Author: xubo245 <601450...@qq.com>
Authored: Fri Jan 26 20:40:20 2018 +0800
Committer: chenliang613 <chenliang...@huawei.com>
Committed: Sun Jan 28 15:14:01 2018 +0800

----------------------------------------------------------------------
 .../dataload/TestLoadDataGeneral.scala          | 10 +--
 .../preaggregate/TestPreAggCreateCommand.scala  | 41 +++++----
 .../timeseries/TestTimeSeriesCreateTable.scala  | 88 +++++++++++++-------
 .../InsertIntoCarbonTableTestCase.scala         |  9 +-
 .../MajorCompactionIgnoreInMinorTest.scala      |  6 +-
 .../TestDataLoadWithColumnsMoreThanSchema.scala | 37 +++++---
 .../TestLoadDataWithDiffTimestampFormat.scala   | 56 +++++--------
 .../TestLoadDataWithFileHeaderException.scala   | 76 +++++++----------
 ...ataWithMalformedCarbonCommandException.scala | 52 ++++--------
 .../TestLoadDataWithNotProperInputFile.scala    | 16 ++--
 .../dataretention/DataRetentionTestCase.scala   | 75 ++++-------------
 .../deleteTable/TestDeleteTableNewDDL.scala     | 10 +--
 .../ColumnPropertyValidationTestCase.scala      | 21 +++--
 .../util/ExternalColumnDictionaryTestCase.scala | 24 ++----
 .../commands/SetCommandTestCase.scala           | 42 ++--------
 .../register/TestRegisterCarbonTable.scala      |  6 +-
 16 files changed, 232 insertions(+), 337 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
index 0be0b6d..c122287 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
@@ -175,17 +175,11 @@ class TestLoadDataGeneral extends QueryTest with 
BeforeAndAfterAll {
     sql("CREATE TABLE load32000chardata(dim1 String, dim2 String, mes1 int) 
STORED BY 'org.apache.carbondata.format'")
     sql("CREATE TABLE load32000chardata_dup(dim1 String, dim2 String, mes1 
int) STORED BY 'org.apache.carbondata.format'")
     sql(s"LOAD DATA LOCAL INPATH '$testdata' into table load32000chardata 
OPTIONS('FILEHEADER'='dim1,dim2,mes1')")
-    try{
+    intercept[Exception] {
       sql("insert into load32000chardata_dup select 
dim1,concat(load32000chardata.dim2,'aaaa'),mes1 from load32000chardata").show()
-      assert(false)
-    } catch {
-      case _:Exception => assert(true)
     }
-    try{
+    intercept[Exception] {
       sql("update load32000chardata_dup 
set(load32000chardata_dup.dim2)=(select concat(load32000chardata.dim2,'aaaa') 
from load32000chardata)").show()
-      assert(false)
-    } catch {
-      case _:Exception => assert(true)
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
index 5784bf2..755a449 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
@@ -118,35 +118,40 @@ class TestPreAggCreateCommand extends QueryTest with 
BeforeAndAfterAll {
   }
 
   test("test pre agg create table 13") {
-    try {
+    intercept[Exception] {
       sql(
-        "create datamap preagg19 on table PreAggMain2 using 'preaggregate' as 
select a as a1,count(distinct b) from PreAggMain2 group by a")
-      assert(false)
-    } catch {
-      case _: Exception =>
-        assert(true)
+        s"""
+           | create datamap preagg19 on table PreAggMain2
+           | using 'preaggregate'
+           | as select a as a1,count(distinct b)
+           | from PreAggMain2 group by a
+         """.stripMargin)
     }
   }
 
   test("test pre agg create table 14") {
-    try {
+    intercept[Exception] {
       sql(
-        "create datamap preagg20 on table PreAggMain2 using 'preaggregate' as 
select a as a1,sum(distinct b) from PreAggMain2 group by a")
-      assert(false)
-    } catch {
-      case _: Exception =>
-        assert(true)
+        s"""
+           | create datamap preagg20 on table PreAggMain2
+           | using 'preaggregate'
+           | as select a as a1,sum(distinct b) from PreAggMain2
+           | group by a
+         """.stripMargin)
     }
   }
 
   test("test pre agg create table 15") {
-    try {
+    intercept[Exception] {
       sql(
-        "create datamap preagg21 on table PreAggMain2 using 'preaggregate' as 
select a as a1,sum(b) from PreAggMain2 where a='vishal' group by a")
-      assert(false)
-    } catch {
-      case _: Exception =>
-        assert(true)
+        s"""
+           | create datamap preagg21 on table PreAggMain2
+           | using 'preaggregate'
+           | as select a as a1,sum(b)
+           | from PreAggMain2
+           | where a='vishal'
+           | group by a
+         """.stripMargin)
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/timeseries/TestTimeSeriesCreateTable.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/timeseries/TestTimeSeriesCreateTable.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/timeseries/TestTimeSeriesCreateTable.scala
index 5cbcb26..c9041fa 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/timeseries/TestTimeSeriesCreateTable.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/timeseries/TestTimeSeriesCreateTable.scala
@@ -50,59 +50,87 @@ class TestTimeSeriesCreateTable extends QueryTest with 
BeforeAndAfterAll {
   }
 
   test("test timeseries create table five") {
-    try {
+    intercept[Exception] {
       sql(
-        "create datamap agg0 on table mainTable using 'preaggregate' 
DMPROPERTIES ('timeseries.eventTime'='dataTime', 
'timeseries.hierarchy'='sec=1,hour=1,day=1,month=1,year=1') as select dataTime, 
sum(age) from mainTable group by dataTime")
-      assert(false)
-    } catch {
-      case _:Exception =>
-        assert(true)
+        s"""
+           | create datamap agg0 on table mainTable
+           | using 'preaggregate'
+           | DMPROPERTIES (
+           |  'timeseries.eventTime'='dataTime',
+           |  'timeseries.hierarchy'='sec=1,hour=1,day=1,month=1,year=1')
+           | as select dataTime, sum(age) from mainTable
+           | group by dataTime
+         """.stripMargin)
     }
   }
 
   test("test timeseries create table Six") {
-    try {
+    intercept[Exception] {
       sql(
-        "create datamap agg0 on table mainTable using 'preaggregate' 
DMPROPERTIES ('timeseries.eventTime'='dataTime', 
'timeseries.hierarchy'='hour=2') as select dataTime, sum(age) from mainTable 
group by dataTime")
-      assert(false)
-    } catch {
-      case _:Exception =>
-        assert(true)
+        """
+          | create datamap agg0 on table mainTable
+          | using 'preaggregate'
+          | DMPROPERTIES ('timeseries.eventTime'='dataTime', 
'timeseries.hierarchy'='hour=2')
+          | as select dataTime, sum(age) from mainTable
+          | group by dataTime
+        """.stripMargin)
+
     }
   }
 
   test("test timeseries create table seven") {
-    try {
+    intercept[Exception] {
+      sql(
+        s"""
+           | create datamap agg0 on table mainTable
+           | using 'preaggregate'
+           | DMPROPERTIES (
+           |    'timeseries.eventTime'='dataTime',
+           |    'timeseries.hierarchy'='hour=1,day=1,year=1,month=1')
+           | as select dataTime, sum(age) from mainTable
+           | group by dataTime
+         """.stripMargin)
       sql(
-        "create datamap agg0 on table mainTable using 'preaggregate' 
DMPROPERTIES ('timeseries.eventTime'='dataTime', 
'timeseries.hierarchy'='hour=1,day=1,year=1,month=1') as select dataTime, 
sum(age) from mainTable group by dataTime")
-      assert(false)
-    } catch {
-      case _:Exception =>
-        assert(true)
+        s"""
+           | create datamap agg0 on table mainTable
+           | using 'preaggregate'
+           | DMPROPERTIES (
+           |    'timeseries.eventTime'='dataTime',
+           |    'timeseries.hierarchy'='hour=1,day=1,year=1,month=1')
+           | as select dataTime, sum(age) from mainTable
+           | group by dataTime
+         """.stripMargin)
     }
   }
 
   test("test timeseries create table Eight") {
-    try {
+    intercept[Exception] {
       sql(
-        "create datamap agg0 on table mainTable using 'preaggregate' 
DMPROPERTIES ('timeseries.eventTime'='name', 
'timeseries.hierarchy'='hour=1,day=1,year=1,month=1') as select name, sum(age) 
from mainTable group by name")
-      assert(false)
-    } catch {
-      case _:Exception =>
-        assert(true)
+        s"""
+           | create datamap agg0 on table mainTable
+           | using 'preaggregate'
+           | DMPROPERTIES ('timeseries.eventTime'='name', 
'timeseries.hierarchy'='hour=1,day=1,year=1,month=1')
+           | as select name, sum(age) from mainTable
+           | group by name
+         """.stripMargin)
     }
   }
 
   test("test timeseries create table Nine") {
-    try {
+    intercept[Exception] {
       sql(
-        "create datamap agg0 on table mainTable using 'preaggregate' 
DMPROPERTIES ('timeseries.eventTime'='dataTime', 
'timeseries.hierarchy'='hour=1,day=1,year=1,month=1') as select name, sum(age) 
from mainTable group by name")
-      assert(false)
-    } catch {
-      case _:Exception =>
-        assert(true)
+        s"""
+           | create datamap agg0 on table mainTable
+           | using 'preaggregate'
+           | DMPROPERTIES (
+           |    'timeseries.eventTime'='dataTime',
+           |    'timeseries.hierarchy'='hour=1,day=1,year=1,month=1')
+           | as select name, sum(age) from mainTable
+           | group by name
+         """.stripMargin)
     }
   }
+
   override def afterAll: Unit = {
     sql("drop table if exists mainTable")
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
index 6739c6c..d59f0b5 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
@@ -88,12 +88,9 @@ class InsertIntoCarbonTableTestCase extends QueryTest with 
BeforeAndAfterAll {
   test("insert->carbon column is more then hive-fails") {
      sql("drop table if exists TCarbon")
      sql("create table TCarbon (imei string,deviceInformationId int,MAC 
string,deviceColor string,gamePointId double,contractNumber BigInt) STORED BY 
'org.apache.carbondata.format'")
-     try {
-        sql("insert into TCarbon select 
imei,deviceInformationId,MAC,deviceColor,gamePointId from THive")
-        assert(false)
-     } catch  {
-       case ex: Exception => assert(true)
-     }
+    intercept[Exception] {
+      sql("insert into TCarbon select 
imei,deviceInformationId,MAC,deviceColor,gamePointId from THive")
+    }
   }
   test("insert->insert wrong data types-pass") {
      sql("drop table if exists TCarbon")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
index 6155d7c..86f974c 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
@@ -104,12 +104,8 @@ class MajorCompactionIgnoreInMinorTest extends QueryTest 
with BeforeAndAfterAll
     * Delete should not work on compacted segment.
     */
   test("delete compacted segment and check status") {
-    try {
+    intercept[Throwable] {
       sql("delete from table ignoremajor where segment.id in (2)")
-      assert(false)
-    }
-    catch {
-      case _:Throwable => assert(true)
     }
 
     val carbonTable = CarbonMetadata.getInstance().getCarbonTable(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
index 1e34ec8..1532328 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
@@ -48,12 +48,26 @@ class TestDataLoadWithColumnsMoreThanSchema extends 
QueryTest with BeforeAndAfte
 
   test("test for invalid value of maxColumns") {
     sql("DROP TABLE IF EXISTS max_columns_test")
-    sql("CREATE TABLE max_columns_test (imei string,age int,task bigint,num 
double,level decimal(10,3),productdate timestamp,mark int,name string)STORED BY 
'org.apache.carbondata.format'")
-    try {
-      sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/character_carbon.csv' into 
table max_columns_test options('MAXCOLUMNS'='avfgd')")
-      assert(false)
-    } catch {
-      case _: Throwable => assert(true)
+    sql(
+      s"""
+         |  CREATE TABLE max_columns_test (
+         |    imei string,
+         |    age int,
+         |    task bigint,
+         |    num double,
+         |    level decimal(10,3),
+         |    productdate timestamp,
+         |    mark int,
+         |    name string)
+         |  STORED BY 'org.apache.carbondata.format'
+       """.stripMargin)
+    intercept[Throwable] {
+      sql(
+        s"""
+           | LOAD DATA LOCAL INPATH '$resourcesPath/character_carbon.csv'
+           | into table max_columns_test
+           | options('MAXCOLUMNS'='avfgd')
+         """.stripMargin)
     }
   }
 
@@ -74,15 +88,12 @@ class TestDataLoadWithColumnsMoreThanSchema extends 
QueryTest with BeforeAndAfte
       "CREATE TABLE max_columns_value_test (imei string,age int,task 
bigint,num double,level " +
       "decimal(10,3),productdate timestamp,mark int,name string) STORED BY 
'org.apache.carbondata" +
       ".format'")
-    try {
+
+
+    intercept[Throwable] {
       sql(
         s"LOAD DATA LOCAL INPATH '$resourcesPath/character_carbon.csv' into 
table " +
-        "max_columns_value_test 
options('FILEHEADER='imei,age','MAXCOLUMNS'='2')")
-      throw new MalformedCarbonCommandException("Invalid")
-    } catch {
-      case me: MalformedCarbonCommandException =>
-        assert(false)
-      case _: Throwable => assert(true)
+          "max_columns_value_test 
options('FILEHEADER='imei,age','MAXCOLUMNS'='2')")
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithDiffTimestampFormat.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithDiffTimestampFormat.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithDiffTimestampFormat.scala
index 1b0da78..ec6fff1 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithDiffTimestampFormat.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithDiffTimestampFormat.scala
@@ -76,65 +76,51 @@ class TestLoadDataWithDiffTimestampFormat extends QueryTest 
with BeforeAndAfterA
   }
 
   test("test load data with different timestamp format with wrong setting") {
-    try {
-      sql(s"""
+
+    val ex = intercept[MalformedCarbonCommandException] {
+      sql(
+        s"""
            LOAD DATA LOCAL INPATH '$resourcesPath/timeStampFormatData1.csv' 
into table t3
            OPTIONS('dateformat' = 'date')
            """)
-      assert(false)
-    } catch {
-      case ex: MalformedCarbonCommandException =>
-        assertResult(ex.getMessage)("Error: Wrong option: date is provided for 
option DateFormat")
-      case _: Throwable=> assert(false)
     }
+    assertResult(ex.getMessage)("Error: Wrong option: date is provided for 
option DateFormat")
 
-    try {
-      sql(s"""
+    val ex0 = intercept[MalformedCarbonCommandException] {
+      sql(
+        s"""
            LOAD DATA LOCAL INPATH '$resourcesPath/timeStampFormatData1.csv' 
into table t3
            OPTIONS('timestampformat' = 'timestamp')
            """)
-      assert(false)
-    } catch {
-      case ex: MalformedCarbonCommandException =>
-        assertResult(ex.getMessage)("Error: Wrong option: timestamp is 
provided for option TimestampFormat")
-      case _: Throwable => assert(false)
     }
+    assertResult(ex0.getMessage)("Error: Wrong option: timestamp is provided 
for option TimestampFormat")
 
-    try {
-      sql(s"""
+    val ex1 = intercept[MalformedCarbonCommandException] {
+      sql(
+        s"""
            LOAD DATA LOCAL INPATH '$resourcesPath/timeStampFormatData1.csv' 
into table t3
            OPTIONS('dateformat' = 'date:  ')
            """)
-      assert(false)
-    } catch {
-      case ex: MalformedCarbonCommandException =>
-        assertResult(ex.getMessage)("Error: Wrong option: date:   is provided 
for option DateFormat")
-      case _: Throwable => assert(false)
     }
+    assertResult(ex1.getMessage)("Error: Wrong option: date:   is provided for 
option DateFormat")
 
-    try {
-      sql(s"""
+    val ex2 = intercept[MalformedCarbonCommandException] {
+      sql(
+        s"""
            LOAD DATA LOCAL INPATH '$resourcesPath/timeStampFormatData1.csv' 
into table t3
            OPTIONS('dateformat' = 'date  ')
            """)
-      assert(false)
-    } catch {
-      case ex: MalformedCarbonCommandException =>
-        assertResult(ex.getMessage)("Error: Wrong option: date   is provided 
for option DateFormat")
-      case _: Throwable => assert(false)
     }
+    assertResult(ex2.getMessage)("Error: Wrong option: date   is provided for 
option DateFormat")
 
-    try {
-      sql(s"""
+    val ex3 = intercept[MalformedCarbonCommandException] {
+      sql(
+        s"""
            LOAD DATA LOCAL INPATH '$resourcesPath/timeStampFormatData1.csv' 
into table t3
            OPTIONS('dateformat' = 'fasfdas:yyyy/MM/dd')
            """)
-      assert(false)
-    } catch {
-      case ex: MalformedCarbonCommandException =>
-        assertResult(ex.getMessage)("Error: Wrong option: fasfdas:yyyy/MM/dd 
is provided for option DateFormat")
-      case _: Throwable => assert(false)
     }
+    assertResult(ex3.getMessage)("Error: Wrong option: fasfdas:yyyy/MM/dd is 
provided for option DateFormat")
 
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithFileHeaderException.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithFileHeaderException.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithFileHeaderException.scala
index 3d3fb0c..7700ed5 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithFileHeaderException.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithFileHeaderException.scala
@@ -32,54 +32,45 @@ class TestLoadDataWithFileHeaderException extends QueryTest 
with BeforeAndAfterA
   }
 
   test("test load data both file and ddl without file header exception") {
-    try {
-      sql(s"""
-           LOAD DATA LOCAL INPATH '$resourcesPath/source_without_header.csv' 
into table t3
-           """)
-      assert(false)
-    } catch {
-      case e: Exception =>
-        assert(e.getMessage.contains("CSV header in input file is not proper. 
Column names in schema and csv header are not the same."))
+    val e = intercept[Exception] {
+      sql(
+        s"""LOAD DATA LOCAL INPATH '$resourcesPath/source_without_header.csv' 
into table t3""")
     }
+    assert(e.getMessage.contains(
+      "CSV header in input file is not proper. Column names in schema and csv 
header are not the same."))
   }
 
   test("test load data ddl provided wrong file header exception") {
-    try {
-      sql(s"""
+    val e = intercept[Exception] {
+      sql(
+        s"""
            LOAD DATA LOCAL INPATH '$resourcesPath/source_without_header.csv' 
into table t3
            options('fileheader'='no_column')
            """)
-      assert(false)
-    } catch {
-      case e: Exception =>
-        assert(e.getMessage.contains("CSV header in DDL is not proper. Column 
names in schema and CSV header are not the same"))
     }
+    assert(e.getMessage.contains("CSV header in DDL is not proper. Column 
names in schema and CSV header are not the same"))
   }
 
   test("test load data with wrong header , but without fileheader") {
-    try {
-      sql(s"""
+    val e = intercept[Exception] {
+      sql(
+        s"""
            LOAD DATA LOCAL INPATH '$resourcesPath/source.csv' into table t3
            options('header'='abc')
            """)
-      assert(false)
-    } catch {
-      case e: Exception =>
-        assert(e.getMessage.contains("'header' option should be either 'true' 
or 'false'"))
     }
+    assert(e.getMessage.contains("'header' option should be either 'true' or 
'false'"))
   }
 
   test("test load data with wrong header and fileheader") {
-    try {
-      sql(s"""
+    val e = intercept[Exception] {
+      sql(
+        s"""
          LOAD DATA LOCAL INPATH '$resourcesPath/source_without_header.csv' 
into table t3
          options('header'='', 
'fileheader'='ID,date,country,name,phonetype,serialname,salary')
          """)
-      assert(false)
-    } catch {
-      case e: Exception =>
-        assert(e.getMessage.contains("'header' option should be either 'true' 
or 'false'"))
     }
+    assert(e.getMessage.contains("'header' option should be either 'true' or 
'false'"))
   }
 
   test("test load data with header=false, but without fileheader") {
@@ -97,16 +88,14 @@ class TestLoadDataWithFileHeaderException extends QueryTest 
with BeforeAndAfterA
   }
 
   test("test load data with header=false and wrong fileheader") {
-    try {
-      sql(s"""
+    val e = intercept[Exception] {
+      sql(
+        s"""
         LOAD DATA LOCAL INPATH '$resourcesPath/source_without_header.csv' into 
table t3
         options('header'='false', 
'fileheader'='ID1,date2,country,name,phonetype,serialname,salary')
         """)
-      assert(false)
-    } catch {
-      case e: Exception =>
-        assert(e.getMessage.contains("CSV header in DDL is not proper. Column 
names in schema and CSV header are not the same"))
     }
+    assert(e.getMessage.contains("CSV header in DDL is not proper. Column 
names in schema and CSV header are not the same"))
   }
 
   test("test load data with header=true, but without fileheader") {
@@ -117,29 +106,27 @@ class TestLoadDataWithFileHeaderException extends 
QueryTest with BeforeAndAfterA
   }
 
   test("test load data with header=true and fileheader") {
-    try {
-      sql(s"""
+
+    val e = intercept[Exception] {
+      sql(
+        s"""
            LOAD DATA LOCAL INPATH '$resourcesPath/source.csv' into table t3
            options('header'='true', 
'fileheader'='ID,date,country,name,phonetype,serialname,salary')
            """)
-      assert(false)
-    } catch {
-      case e: Exception =>
-        assert(e.getMessage.contains("When 'header' option is true, 
'fileheader' option is not required."))
     }
+    assert(e.getMessage.contains("When 'header' option is true, 'fileheader' 
option is not required."))
   }
 
   test("test load data with header=true and wrong fileheader") {
-    try {
-      sql(s"""
+
+    val e = intercept[Exception] {
+      sql(
+        s"""
            LOAD DATA LOCAL INPATH '$resourcesPath/source.csv' into table t3
            options('header'='true', 
'fileheader'='ID1,date1,country,name,phonetype,serialname,salary')
            """)
-      assert(false)
-    } catch {
-      case e: Exception =>
-        assert(e.getMessage.contains("When 'header' option is true, 
'fileheader' option is not required."))
     }
+    assert(e.getMessage.contains("When 'header' option is true, 'fileheader' 
option is not required."))
   }
 
   test("test load data without header and fileheader") {
@@ -155,6 +142,7 @@ class TestLoadDataWithFileHeaderException extends QueryTest 
with BeforeAndAfterA
          """)
   }
 
+
   override def afterAll {
     sql("DROP TABLE IF EXISTS t3")
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithMalformedCarbonCommandException.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithMalformedCarbonCommandException.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithMalformedCarbonCommandException.scala
index 3462a07..1851705 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithMalformedCarbonCommandException.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithMalformedCarbonCommandException.scala
@@ -79,60 +79,43 @@ class TestLoadDataWithMalformedCarbonCommandException 
extends QueryTest with Bef
   }
 
   test("test load data with dictionary exclude columns which no exist in 
table.") {
-    try {
+    val e = intercept[MalformedCarbonCommandException] {
       buildTableWithNoExistDictExclude()
-    } catch {
-      case e: MalformedCarbonCommandException =>
-        assert(e.getMessage.equals("DICTIONARY_EXCLUDE column: ccc does not 
exist in table. " +
-          "Please check create table statement."))
-      case _: Throwable => assert(false)
     }
+    assert(e.getMessage.equals("DICTIONARY_EXCLUDE column: ccc does not exist 
in table. " +
+      "Please check create table statement."))
   }
 
   test("test load data with dictionary include columns which no exist in 
table.") {
-    try {
+    val e = intercept[MalformedCarbonCommandException] {
       buildTableWithNoExistDictInclude()
-    } catch {
-      case e: MalformedCarbonCommandException =>
-        assert(e.getMessage.equals("DICTIONARY_INCLUDE column: aaa does not 
exist in table. " +
-          "Please check create table statement."))
-      case _: Throwable => assert(false)
     }
+    assert(e.getMessage.equals("DICTIONARY_INCLUDE column: aaa does not exist 
in table. " +
+      "Please check create table statement."))
   }
 
   test("test load data with dictionary include is same with dictionary 
exclude") {
-    try {
+    val e = intercept[MalformedCarbonCommandException] {
       buildTableWithSameDictExcludeAndInclude()
-    } catch {
-      case e: MalformedCarbonCommandException =>
-        assert(e.getMessage.equals("DICTIONARY_EXCLUDE can not contain the 
same column: country " +
-          "with DICTIONARY_INCLUDE. Please check create table statement."))
-      case _: Throwable => assert(false)
     }
+    assert(e.getMessage.equals("DICTIONARY_EXCLUDE can not contain the same 
column: country " +
+      "with DICTIONARY_INCLUDE. Please check create table statement."))
   }
 
   test("test load data with invalid option") {
-    try {
+    val e = intercept[MalformedCarbonCommandException] {
       sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention1.csv' INTO 
TABLE " +
         "TestLoadTableOptions OPTIONS('QUOTECHAR'='\"', 'DELIMITERRR' =  ',')")
-      assert(false)
-    } catch {
-      case e: MalformedCarbonCommandException =>
-        assert(e.getMessage.equals("Error: Invalid option(s): delimiterrr"))
-      case _: Throwable => assert(false)
     }
+    assert(e.getMessage.equals("Error: Invalid option(s): delimiterrr"))
   }
 
   test("test load data with duplicate options") {
-    try {
+    val e = intercept[MalformedCarbonCommandException] {
       sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention1.csv' INTO 
TABLE " +
         "TestLoadTableOptions OPTIONS('DELIMITER' =  ',', 'quotechar'='\"', 
'DELIMITER' =  '$')")
-      assert(false)
-    } catch {
-      case e: MalformedCarbonCommandException =>
-        assert(e.getMessage.equals("Error: Duplicate option(s): delimiter"))
-      case _: Throwable => assert(false)
     }
+    assert(e.getMessage.equals("Error: Duplicate option(s): delimiter"))
   }
 
   test("test load data with case sensitive options") {
@@ -147,13 +130,10 @@ class TestLoadDataWithMalformedCarbonCommandException 
extends QueryTest with Bef
   }
 
   test("test load data with dictionary include is same with dictionary exclude 
with spaces") {
-    try {
+    val e = intercept[MalformedCarbonCommandException] {
       buildTableWithSameDictExcludeAndIncludeWithSpaces()
-    } catch {
-      case e: MalformedCarbonCommandException =>
-        assert(e.getMessage.equals("DICTIONARY_EXCLUDE can not contain the 
same column: country " +
-          "with DICTIONARY_INCLUDE. Please check create table statement."))
-      case _: Throwable => assert(false)
     }
+    assert(e.getMessage.equals("DICTIONARY_EXCLUDE can not contain the same 
column: country " +
+      "with DICTIONARY_INCLUDE. Please check create table statement."))
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNotProperInputFile.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNotProperInputFile.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNotProperInputFile.scala
index 47f05ce..a4d7b85 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNotProperInputFile.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNotProperInputFile.scala
@@ -27,15 +27,12 @@ import org.apache.spark.sql.test.util.QueryTest
 class TestLoadDataWithNotProperInputFile extends QueryTest {
 
   test("test loading data with input path exists but has nothing") {
-    try {
+    val e = intercept[Throwable] {
       val dataPath = s"$resourcesPath/nullSample.csv"
       FileUtils.getPaths(dataPath)
-      assert(false)
-    } catch {
-      case e: Throwable =>
-        assert(e.getMessage.contains("Please check your input path and make 
sure " +
-          "that files end with '.csv' and content is not empty"))
     }
+    assert(e.getMessage.contains("Please check your input path and make sure " 
+
+      "that files end with '.csv' and content is not empty"))
   }
 
   test("test loading data with input file not ends with '.csv'") {
@@ -50,13 +47,10 @@ class TestLoadDataWithNotProperInputFile extends QueryTest {
   }
 
   test("test loading data with input file does not exist") {
-    try {
+    val e = intercept[Throwable] {
       val dataPath = s"$resourcesPath/input_file_does_not_exist.csv"
       FileUtils.getPaths(dataPath)
-      assert(false)
-    } catch {
-      case e: Throwable =>
-        assert(e.getMessage.contains("The input file does not exist"))
     }
+    assert(e.getMessage.contains("The input file does not exist"))
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
index bde1f80..ed58253 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
@@ -173,15 +173,11 @@ class DataRetentionTestCase extends QueryTest with 
BeforeAndAfterAll {
   }
 
   test("RetentionTest4_DeleteByInvalidLoadId") {
-    try {
+    val e = intercept[MalformedCarbonCommandException] {
       // delete segment with no id
       sql("delete from table DataRetentionTable where segment.id in ()")
-      assert(false)
-    } catch {
-      case e: MalformedCarbonCommandException =>
-        assert(e.getMessage.contains("should not be empty"))
-      case _: Throwable => assert(false)
     }
+    assert(e.getMessage.contains("should not be empty"))
   }
 
   test("test delete segments by load date with case-insensitive table name") {
@@ -211,27 +207,19 @@ class DataRetentionTestCase extends QueryTest with 
BeforeAndAfterAll {
 
   test("RetentionTest_DeleteSegmentsByLoadTimeValiadtion") {
 
-    try {
+    val e = intercept[MalformedCarbonCommandException] {
       sql(
         "delete from table DataRetentionTable where segment.starttime before" +
-        " 'abcd-01-01 00:00:00'")
-      assert(false)
-    } catch {
-      case e: MalformedCarbonCommandException =>
-        assert(e.getMessage.contains("Invalid load start time format"))
-      case _: Throwable => assert(false)
+          " 'abcd-01-01 00:00:00'")
     }
+    assert(e.getMessage.contains("Invalid load start time format"))
 
-    try {
+    val ex = intercept[MalformedCarbonCommandException] {
       sql(
         "delete from table DataRetentionTable where segment.starttime before" +
-        " '2099:01:01 00:00:00'")
-      assert(false)
-    } catch {
-      case e: MalformedCarbonCommandException =>
-        assert(e.getMessage.contains("Invalid load start time format"))
-      case _: Throwable => assert(false)
+          " '2099:01:01 00:00:00'")
     }
+    assert(ex.getMessage.contains("Invalid load start time format"))
 
     checkAnswer(
       sql("SELECT country, count(salary) AS amount FROM DataRetentionTable 
WHERE country" +
@@ -249,31 +237,16 @@ class DataRetentionTestCase extends QueryTest with 
BeforeAndAfterAll {
 
   test("RetentionTest_InvalidDeleteCommands") {
     // All these queries should fail.
-    try {
+    intercept[Exception] {
       sql("DELETE LOADS FROM TABLE DataRetentionTable where STARTTIME before 
'2099-01-01'")
-      throw new MalformedCarbonCommandException("Invalid query")
-    } catch {
-      case e: MalformedCarbonCommandException =>
-        assert(!e.getMessage.equalsIgnoreCase("Invalid query"))
-      case _: Throwable => assert(true)
     }
 
-    try {
+    intercept[Exception] {
       sql("DELETE LOAD 2 FROM TABLE DataRetentionTable")
-      throw new MalformedCarbonCommandException("Invalid query")
-    } catch {
-      case e: MalformedCarbonCommandException =>
-        assert(!e.getMessage.equalsIgnoreCase("Invalid query"))
-      case _: Throwable => assert(true)
     }
 
-    try {
+    intercept[Exception] {
       sql("show loads for table DataRetentionTable")
-      throw new MalformedCarbonCommandException("Invalid query")
-    } catch {
-      case e: MalformedCarbonCommandException =>
-        assert(!e.getMessage.equalsIgnoreCase("Invalid query"))
-      case _: Throwable => assert(true)
     }
 
   }
@@ -288,37 +261,19 @@ class DataRetentionTestCase extends QueryTest with 
BeforeAndAfterAll {
     carbonCleanFilesLock.lockWithRetries()
 
     // delete segment 0 it should fail
-    try {
+    intercept[Exception] {
       sql("delete from table retentionlock where segment.id in (0)")
-      throw new MalformedCarbonCommandException("Invalid")
-    } catch {
-      case me: MalformedCarbonCommandException =>
-        assert(false)
-      case ex: Exception =>
-        assert(true)
     }
 
     // it should fail
-    try {
+    intercept[Exception] {
       sql("delete from table retentionlock where segment.starttime before " +
-          "'2099-01-01 00:00:00.0'")
-      throw new MalformedCarbonCommandException("Invalid")
-    } catch {
-      case me: MalformedCarbonCommandException =>
-        assert(false)
-      case ex: Exception =>
-        assert(true)
+        "'2099-01-01 00:00:00.0'")
     }
 
     // it should fail
-    try {
+    intercept[Exception] {
       sql("clean files for table retentionlock")
-      throw new MalformedCarbonCommandException("Invalid")
-    } catch {
-      case me: MalformedCarbonCommandException =>
-        assert(false)
-      case ex: Exception =>
-        assert(true)
     }
 
     sql("SHOW SEGMENTS FOR TABLE retentionlock").show

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
index 2f30215..811cb02 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
@@ -64,18 +64,12 @@ class TestDeleteTableNewDDL extends QueryTest with 
BeforeAndAfterAll {
     sql("use testdb")
     sql("CREATE TABLE IF NOT EXISTS testtable(empno Int, empname string, 
utilization Int,salary Int)"
         + " STORED BY 'org.apache.carbondata.format' ")
-    try {
+    intercept[Exception] {
       sql("drop database testdb")
-      assert(false)
-    } catch {
-      case _ : Exception =>
     }
     sql("drop database testdb cascade")
-    try {
+    intercept[Exception] {
       sql("use testdb")
-      assert(false)
-    } catch {
-      case _ : Exception =>
     }
     sql("use default")
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ColumnPropertyValidationTestCase.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ColumnPropertyValidationTestCase.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ColumnPropertyValidationTestCase.scala
index b3ea677..69cccf7 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ColumnPropertyValidationTestCase.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ColumnPropertyValidationTestCase.scala
@@ -34,13 +34,20 @@ class ColumnPropertyValidationTestCase extends QueryTest 
with BeforeAndAfterAll
      }
   }
   test("Validate Dictionary include _ invalid key") {
-     try {
-       sql("create table employee(empname String,empid String,city 
String,country String,gender String,salary Double) stored by 
'org.apache.carbondata.format' 
tblproperties('columnproperties.invalid.key'='value')")
-       assert(false)
-       sql("drop table employee")
-     } catch {
-       case e: Throwable =>assert(true)
-     }
+    intercept[Throwable] {
+      sql(
+        s"""
+           | create table employee(
+           |    empname String,
+           |    empid String,
+           |    city String,
+           |    country String,
+           |    gender String,
+           |    salary Double)
+           | stored by 'org.apache.carbondata.format'
+           | tblproperties('columnproperties.invalid.key'='value')
+         """.stripMargin)
+    }
   }
 
   override def afterAll() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
 
b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
index cad7807..a1b39d8 100644
--- 
a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
+++ 
b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
@@ -263,37 +263,29 @@ class ExternalColumnDictionaryTestCase extends 
Spark2QueryTest with BeforeAndAft
   }
 
   test("COLUMNDICT and ALL_DICTIONARY_PATH can not be used together") {
-    try {
+    val ex = intercept[MalformedCarbonCommandException] {
       sql(
         s"""
         LOAD DATA LOCAL INPATH "$complexFilePath1" INTO TABLE loadSqlTest
         
OPTIONS('COLUMNDICT'='$extColDictFilePath1',"ALL_DICTIONARY_PATH"='$extColDictFilePath1')
         """)
-      assert(false)
-    } catch {
-      case ex: MalformedCarbonCommandException =>
-        assertResult(ex.getMessage)(
-          "Error: COLUMNDICT and ALL_DICTIONARY_PATH can not be used together 
" +
-          "in options")
-      case _: Throwable => assert(false)
     }
+    assertResult(ex.getMessage)(
+      "Error: COLUMNDICT and ALL_DICTIONARY_PATH can not be used together " +
+        "in options")
   }
 
   test("Measure can not use COLUMNDICT") {
-    try {
+    val ex = intercept[DataLoadingException] {
       sql(
         s"""
       LOAD DATA LOCAL INPATH "$complexFilePath1" INTO TABLE loadSqlTest
       OPTIONS('single_pass'='true','FILEHEADER'='$header', 
'COLUMNDICT'='gamePointId:$filePath')
       """)
-      assert(false)
-    } catch {
-      case ex: DataLoadingException =>
-        assertResult(ex.getMessage)(
-          "Column gamePointId is not a key column. Only key column can be part 
" +
-          "of dictionary and used in COLUMNDICT option.")
-      case _: Throwable => assert(false)
     }
+    assertResult(ex.getMessage)(
+      "Column gamePointId is not a key column. Only key column can be part " +
+        "of dictionary and used in COLUMNDICT option.")
   }
 
   def cleanAllTables: Unit = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark2/src/test/scala/org/apache/spark/carbondata/commands/SetCommandTestCase.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/commands/SetCommandTestCase.scala
 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/commands/SetCommandTestCase.scala
index 60cf121..303a478 100644
--- 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/commands/SetCommandTestCase.scala
+++ 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/commands/SetCommandTestCase.scala
@@ -39,12 +39,8 @@ class SetCommandTestCase extends Spark2QueryTest with 
BeforeAndAfterAll{
   }
 
   test("test set command for enable.unsafe.sort for invalid option") {
-    try {
+    intercept[InvalidConfigurationException] {
       checkAnswer(sql("set enable.unsafe.sort=123"), sql("set 
enable.unsafe.sort"))
-      assert(false)
-    } catch {
-      case ex: InvalidConfigurationException =>
-        assert(true)
     }
   }
   //is_empty_data_bad_record
@@ -58,15 +54,11 @@ class SetCommandTestCase extends Spark2QueryTest with 
BeforeAndAfterAll{
 
   test(s"test set command for ${
     CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORDS_LOGGER_ENABLE} for 
invalid option") {
-    try {
+    intercept[InvalidConfigurationException] {
       checkAnswer(sql(s"set ${
         CarbonLoadOptionConstants
           .CARBON_OPTIONS_BAD_RECORDS_LOGGER_ENABLE
       }=123"), sql(s"set ${ 
CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORDS_LOGGER_ENABLE }"))
-      assert(false)
-    } catch {
-      case ex: InvalidConfigurationException =>
-        assert(true)
     }
   }
   test(s"test set command for ${
@@ -82,14 +74,10 @@ class SetCommandTestCase extends Spark2QueryTest with 
BeforeAndAfterAll{
 
   test(s"test set command for 
${CarbonLoadOptionConstants.CARBON_OPTIONS_IS_EMPTY_DATA_BAD_RECORD} " +
        s"for invalid option") {
-    try {
+    intercept[InvalidConfigurationException] {
       checkAnswer(
         sql(s"set 
${CarbonLoadOptionConstants.CARBON_OPTIONS_IS_EMPTY_DATA_BAD_RECORD}=123"),
         sql(s"set 
${CarbonLoadOptionConstants.CARBON_OPTIONS_IS_EMPTY_DATA_BAD_RECORD}"))
-      assert(false)
-    } catch {
-      case ex: InvalidConfigurationException =>
-        assert(true)
     }
   }
   //carbon.custom.block.distribution
@@ -99,13 +87,9 @@ class SetCommandTestCase extends Spark2QueryTest with 
BeforeAndAfterAll{
   }
 
   test("test set command for carbon.custom.block.distribution for invalid 
option") {
-    try {
+    intercept[InvalidConfigurationException] {
       checkAnswer(sql("set carbon.custom.block.distribution=123"),
         sql("set carbon.custom.block.distribution"))
-      assert(false)
-    } catch {
-      case ex: InvalidConfigurationException =>
-        assert(true)
     }
   }
   // sort_scope
@@ -115,13 +99,9 @@ class SetCommandTestCase extends Spark2QueryTest with 
BeforeAndAfterAll{
   }
 
   test(s"test set command for 
${CarbonLoadOptionConstants.CARBON_OPTIONS_SORT_SCOPE} for invalid option") {
-    try {
+    intercept[InvalidConfigurationException] {
       checkAnswer(sql(s"set 
${CarbonLoadOptionConstants.CARBON_OPTIONS_SORT_SCOPE}=123"),
         sql(s"set ${CarbonLoadOptionConstants.CARBON_OPTIONS_SORT_SCOPE}"))
-      assert(false)
-    } catch {
-      case ex: InvalidConfigurationException =>
-        assert(true)
     }
   }
   // batch_sort_size_inmb
@@ -131,13 +111,9 @@ class SetCommandTestCase extends Spark2QueryTest with 
BeforeAndAfterAll{
   }
 
   test(s"test set 
${CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB} for invalid 
option") {
-    try {
+    intercept[InvalidConfigurationException] {
       checkAnswer(sql(s"set 
${CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB}=hjf"),
         sql(s"set 
${CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB}"))
-      assert(false)
-    } catch {
-      case ex: InvalidConfigurationException =>
-        assert(true)
     }
   }
   // single_pass
@@ -147,13 +123,9 @@ class SetCommandTestCase extends Spark2QueryTest with 
BeforeAndAfterAll{
   }
 
   test(s"test set ${CarbonLoadOptionConstants.CARBON_OPTIONS_SINGLE_PASS} for 
invalid option") {
-    try {
+    intercept[InvalidConfigurationException] {
       checkAnswer(sql(s"set 
${CarbonLoadOptionConstants.CARBON_OPTIONS_SINGLE_PASS}=123"),
         sql(s"set ${CarbonLoadOptionConstants.CARBON_OPTIONS_SINGLE_PASS}"))
-      assert(false)
-    } catch {
-      case ex: InvalidConfigurationException =>
-        assert(true)
     }
   }
   override def afterAll {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/842b9e5f/integration/spark2/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala
 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala
index 1b36e17..389f2cd 100644
--- 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala
+++ 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala
@@ -150,12 +150,8 @@ class TestRegisterCarbonTable extends QueryTest with 
BeforeAndAfterAll {
     sql("drop table carbontable")
     if 
(!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore)
 {
       restoreData(dblocation, "carbontable")
-      try {
+      intercept[AnalysisException] {
         sql("refresh table carbontable")
-        assert(false)
-      } catch {
-        case e: AnalysisException =>
-          assert(true)
       }
       restoreData(dblocation, "carbontable_preagg1")
     }
