This is an automated email from the ASF dual-hosted git repository.
akashrn5 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git
The following commit(s) were added to refs/heads/master by this push:
new b04e443 [SDV] Enhancements in SDV Test framework
b04e443 is described below
commit b04e443c71ccf089fa5f2d75a7cacf89fcb53348
Author: shivamasn <[email protected]>
AuthorDate: Tue Sep 24 11:59:46 2019 +0530
[SDV] Enhancements in SDV Test framework
Problem: The SDV framework was migrated to run with Spark 2.3. Some test cases were failing because Hive removes the source CSV file after loading it, so the next load of the same file failed.
Solution: Fixed this by adding dedicated CSV files for the Hive tables and by fixing the compile issues for the different Spark versions.
This closes #3397
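For context, Hive's LOAD DATA INPATH moves the source file into the table's warehouse directory rather than copying it, which is why a CSV shared between loads disappears after the first one. A minimal Scala sketch of the failure mode and of the fix pattern used throughout this patch (it assumes the suite's QueryTest helpers, sql and resourcesPath; the file names follow the diff below):

    // Hive consumes the source file on LOAD DATA INPATH, so two loads of the
    // same CSV cannot both succeed.
    sql(s"""CREATE TABLE hivetable7 (ID int,CUST_ID int,cust_name string) row format delimited fields terminated by ','""").collect
    // The first load moves test2.csv into the Hive warehouse, so any later
    // load of the same path fails:
    //   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test2.csv' into table hivetable7""").collect
    // The fix gives each Hive table its own copy of the data file:
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test2_hive1.csv' into table hivetable7""").collect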
---
integration/spark-common-cluster-test/pom.xml | 14 +-
.../cluster/sdv/generated/BadRecordTestCase.scala | 4 +-
.../sdv/generated/BatchSortLoad3TestCase.scala | 2 +-
.../cluster/sdv/generated/BucketingTestCase.scala | 10 +-
.../sdv/generated/DataLoadingIUDTestCase.scala | 4 +-
.../sdv/generated/DataLoadingTestCase.scala | 58 +++---
.../cluster/sdv/generated/GlobalSortTestCase.scala | 15 +-
.../cluster/sdv/generated/QueriesBVATestCase.scala | 4 +-
.../sdv/generated/QueriesBasicTestCase.scala | 14 +-
.../sdv/generated/QueriesCompactionTestCase.scala | 92 +++++-----
.../QueriesExcludeDictionaryTestCase.scala | 4 +-
.../QueriesIncludeDictionaryTestCase.scala | 4 +-
.../sdv/generated/QueriesNormalTestCase.scala | 4 +-
.../sdv/generated/QueriesRangeFilterTestCase.scala | 12 +-
.../generated/QueriesSparkBlockDistTestCase.scala | 4 +-
.../generated/TableCommentAlterTableTestCase.scala | 5 +-
.../carbondata/cluster/sdv/suite/SDVSuites.scala | 199 +++++++++++----------
.../apache/spark/sql/common/util/PlanTest.scala | 2 +-
.../spark/sql/test/Spark2TestQueryExecutor.scala | 4 +-
pom.xml | 3 +-
20 files changed, 246 insertions(+), 212 deletions(-)
diff --git a/integration/spark-common-cluster-test/pom.xml b/integration/spark-common-cluster-test/pom.xml
index 9fe89cc..e8cece2 100644
--- a/integration/spark-common-cluster-test/pom.xml
+++ b/integration/spark-common-cluster-test/pom.xml
@@ -69,6 +69,18 @@
<scope>test</scope>
</dependency>
<dependency>
+ <groupId>io.netty</groupId>
+ <artifactId>netty</artifactId>
+ <version>3.9.9.Final</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>io.netty</groupId>
+ <artifactId>netty-all</artifactId>
+ <version>4.1.17.Final</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
<groupId>org.apache.carbondata</groupId>
<artifactId>carbondata-spark2</artifactId>
<version>${project.version}</version>
@@ -180,7 +192,7 @@
<spark.master.url>${spark.master.url}</spark.master.url>
<hdfs.url>${hdfs.url}</hdfs.url>
<presto.jdbc.url>${presto.jdbc.url}</presto.jdbc.url>
- <spark.hadoop.hive.metastore.uris>${spark.hadoop.hive.metastore.uris}</spark.hadoop.hive.metastore.uris>
+ <!--<spark.hadoop.hive.metastore.uris>${spark.hadoop.hive.metastore.uris}</spark.hadoop.hive.metastore.uris>-->
<spark.carbon.hive.schema.store>${carbon.hive.based.metastore}</spark.carbon.hive.schema.store>
</systemProperties>
</configuration>
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BadRecordTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BadRecordTestCase.scala
index 3b57ff0..a4cd333 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BadRecordTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BadRecordTestCase.scala
@@ -117,7 +117,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""drop table if exists hivetable7""").collect
sql(s"""CREATE TABLE badrecordtest7 (ID int,CUST_ID int,cust_name string)
STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""CREATE TABLE hivetable7 (ID int,CUST_ID int,cust_name string) row
format delimited fields terminated by ','""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test2.csv' into
table hivetable7""").collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test2_hive1.csv'
into table hivetable7""").collect
sql(s"""insert into table badrecordtest7 select * from
hivetable7""").collect
checkAnswer(s"""select count(*) from badrecordtest7""",
Seq(Row(3)), "BadRecordTestCase-BadRecords-001_PTS008_TC001")
@@ -132,7 +132,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""drop table if exists hivetable9""").collect
sql(s"""CREATE TABLE badrecordTest9 (ID int,CUST_ID int,cust_name string)
STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""CREATE TABLE hivetable9 (ID int,CUST_ID int,cust_name string) row
format delimited fields terminated by ','""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test2.csv' into
table hivetable9""").collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test2_hive2.csv'
into table hivetable9""").collect
sql(s"""insert into table badrecordTest9 select * from
hivetable9""").collect
checkAnswer(s"""select count(*) from badrecordTest9""",
Seq(Row(3)), "BadRecordTestCase-BadRecords-001_PTS015_TC001")
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad3TestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad3TestCase.scala
index 5aaeffe..ed803ca 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad3TestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad3TestCase.scala
@@ -170,7 +170,7 @@ class BatchSortLoad3TestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""drop TABLE if exists uniqdata_h""").collect
sql(s"""drop TABLE if exists uniqdata_c""").collect
sql(s"""CREATE TABLE uniqdata_h (CUST_ID int,CUST_NAME
String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1
bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2
decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1
int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','""").collect
- sql(s"""load data inpath '$resourcesPath/Data/uniqdata/7000_UniqData.csv'
into table uniqdata_h""").collect
+ sql(s"""load data inpath
'$resourcesPath/Data/uniqdata/7000_UniqData_hive2.csv' into table
uniqdata_h""").collect
sql(s"""CREATE TABLE uniqdata_c (CUST_ID int,CUST_NAME
String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1
bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2
decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1
int) STORED BY 'carbondata'""").collect
sql(s"""insert into uniqdata_c select * from uniqdata_h""")
sql(s"""drop table if exists uniqdata_h""").collect
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala
index 501b089..a6fcf62 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala
@@ -22,7 +22,7 @@ import org.apache.carbondata.core.metadata.CarbonMetadata
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.spark.sql.common.util._
-import org.apache.spark.sql.execution.exchange.ShuffleExchange
+import org.apache.spark.sql.execution.exchange.ShuffleExchangeExec
import org.scalatest.BeforeAndAfterAll
class BucketingTestCase extends QueryTest with BeforeAndAfterAll {
@@ -100,7 +100,7 @@ class BucketingTestCase extends QueryTest with BeforeAndAfterAll {
""".stripMargin).queryExecution.executedPlan
var shuffleExists = false
plan.collect {
- case s: ShuffleExchange => shuffleExists = true
+ case s: ShuffleExchangeExec => shuffleExists = true
}
assert(!shuffleExists, "shuffle should not exist on bucket column join")
}
@@ -121,7 +121,7 @@ class BucketingTestCase extends QueryTest with BeforeAndAfterAll {
var shuffleExists = false
plan.collect {
- case s: ShuffleExchange => shuffleExists = true
+ case s: ShuffleExchangeExec => shuffleExists = true
}
assert(shuffleExists, "shuffle should exist on non-bucket column join")
}
@@ -145,7 +145,7 @@ class BucketingTestCase extends QueryTest with BeforeAndAfterAll {
""".stripMargin).queryExecution.executedPlan
var shuffleExists = false
plan.collect {
- case s: ShuffleExchange => shuffleExists = true
+ case s: ShuffleExchangeExec => shuffleExists = true
}
assert(!shuffleExists, "shuffle should not exist on bucket tables")
}
@@ -167,7 +167,7 @@ class BucketingTestCase extends QueryTest with BeforeAndAfterAll {
""".stripMargin).queryExecution.executedPlan
var shuffleExists = false
plan.collect {
- case s: ShuffleExchange => shuffleExists = true
+ case s: ShuffleExchangeExec => shuffleExists = true
}
assert(!shuffleExists, "shuffle should not exist on bucket tables")
}
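The matches above had to change because Spark 2.3 renamed ShuffleExchange to ShuffleExchangeExec, which is one of the compile issues this patch fixes. A version-agnostic alternative (only a sketch, not what this patch does) is to match on the plan node's class name instead of the concrete type, so the same test compiles against both older and newer Spark releases:

    // Sketch: detect a shuffle without naming the version-specific class.
    var shuffleExists = false
    plan.collect {
      case node if node.getClass.getSimpleName.startsWith("ShuffleExchange") =>
        shuffleExists = true
    }
    assert(!shuffleExists, "shuffle should not exist on bucket column join")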
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingIUDTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingIUDTestCase.scala
index f4d7034..c344f81 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingIUDTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingIUDTestCase.scala
@@ -81,9 +81,9 @@ class DataLoadingIUDTestCase extends QueryTest with BeforeAndAfterAll with Befor
//NA
test("IUD-01-01-01_001-001", Include) {
sql("create table T_Hive1(Active_status BOOLEAN, Item_type_cd TINYINT,
Qty_day_avg SMALLINT, Qty_total INT, Sell_price BIGINT, Sell_pricep FLOAT,
Discount_price DOUBLE , Profit DECIMAL(3,2), Item_code STRING, Item_name
VARCHAR(50), Outlet_name CHAR(100), Update_time TIMESTAMP, Create_date DATE)
row format delimited fields terminated by ',' collection items terminated by
'$'")
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv'
overwrite into table T_Hive1""").collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1_hive10.csv'
overwrite into table T_Hive1""").collect
sql("create table T_Hive6(Item_code STRING, Sub_item_cd ARRAY<string>)row
format delimited fields terminated by ',' collection items terminated by '$'")
- sql(s"""load data inpath '$resourcesPath/Data/InsertData/T_Hive1.csv'
overwrite into table T_Hive6""").collect
+ sql(s"""load data inpath '$resourcesPath/Data/InsertData/T_Hive1_hive11.csv'
overwrite into table T_Hive6""").collect
sql(s"""create table t_carbn02(Active_status String,Item_type_cd
INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep
DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name
String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY
'org.apache.carbondata.format'""").collect
sql(s"""insert into t_carbn02 select * from default.t_carbn01b limit
4""").collect
checkAnswer(s"""select count(*) from t_carbn01b""",
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
index 172cb64..ff8e193 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
@@ -295,7 +295,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""drop table IF EXISTS T_Carbn01""").collect
sql(s"""create table T_Carbn01(Active_status String,Item_type_cd
INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep
DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name
String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY
'org.apache.carbondata.format'""").collect
sql(s"""create table T_Hive1(Active_status String, Item_type_cd INT,
Qty_day_avg INT, Qty_total INT, Sell_price BIGINT, Sell_pricep DOUBLE,
Discount_price DOUBLE , Profit DECIMAL(3,2), Item_code STRING, Item_name
VARCHAR(50), Outlet_name CHAR(100), Update_time TIMESTAMP, Create_date
String)row format delimited fields terminated by ',' collection items
terminated by '$DOLLAR'""").collect
- sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv'
overwrite into table T_Hive1""").collect
+ sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive1_hive1.csv'
overwrite into table T_Hive1""").collect
sql(s"""insert into T_Carbn01 select * from T_Hive1""").collect
checkAnswer(s"""select
active_status,item_type_cd,qty_day_avg,qty_total,sell_price,sell_pricep,discount_price,
profit,item_code,item_name from T_Carbn01 order by update_time""",
s"""select
active_status,item_type_cd,qty_day_avg,qty_total,sell_price,sell_pricep,discount_price,
profit,item_code,item_name from T_Hive1 order by update_time""",
"DataLoadingTestCase-Insert_Func_005")
@@ -347,7 +347,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
test("Insert_Func_008", Include) {
sql(s"""drop table IF EXISTS t_hive2""").collect
sql(s"""create table T_Hive2(Active_status String, Item_type_cd INT,
Qty_day_avg SMALLINT, Qty_total INT, Sell_price BIGINT, Sell_pricep DOUBLE,
Discount_price DOUBLE , Profit DECIMAL(3,2), Item_code STRING, Item_name
VARCHAR(50), Outlet_name CHAR(100), Update_time TIMESTAMP, Create_date
String,Profit_perc DECIMAL(4,3),name string)row format delimited fields
terminated by ',' collection items terminated by '$DOLLAR'""").collect
- sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive2.csv'
overwrite into table T_Hive2""").collect
+ sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive2_hive2.csv'
overwrite into table T_Hive2""").collect
sql(s"""create table T_Carbn01(Active_status String,Item_type_cd
INT,Qty_day_avg SMALLINT,Qty_total INT,Sell_price BIGINT,Sell_pricep
DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name
String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY
'org.apache.carbondata.format'""").collect
sql(s"""insert into T_Carbn01 select * from T_Hive2""").collect
checkAnswer(s"""select
active_status,item_type_cd,qty_day_avg,qty_total,sell_price,sell_pricep,discount_price,
profit,item_code,item_name from T_Carbn01 order by update_time""",
@@ -427,8 +427,8 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""drop table IF EXISTS T_Carbn01""").collect
sql(s"""create table T_Hive4(Item_code STRING, Item_name VARCHAR(50))row
format delimited fields terminated by ',' collection items terminated by
'$DOLLAR'""").collect
sql(s"""create table T_Hive5(Item_code STRING, Profit DECIMAL(3,2))row
format delimited fields terminated by ',' collection items terminated by
'$DOLLAR'""").collect
- sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive4.csv'
overwrite into table T_Hive4""").collect
- sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive5.csv'
overwrite into table T_Hive5""").collect
+ sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive4_hive4.csv'
overwrite into table T_Hive4""").collect
+ sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive5_hive5.csv'
overwrite into table T_Hive5""").collect
sql(s"""create table T_Carbn01(Active_status String,Item_type_cd
INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep
DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name
String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY
'org.apache.carbondata.format'""").collect
sql(s"""insert into T_carbn01 select
x.Active_status,x.Item_type_cd,x.Qty_day_avg,x.Qty_total,x.Sell_price,x.Sell_pricep,x.Discount_price,z.Profit,x.Item_code,y.Item_name,x.Outlet_name,x.Update_time,x.Create_date
from T_Hive1 x,T_Hive4 y, T_Hive5 z where x.Item_code = y.Item_code and
x.Item_code = z.Item_code""").collect
checkAnswer(s"""select
active_status,item_type_cd,qty_day_avg,qty_total,sell_price,sell_pricep,discount_price,
profit,item_code,item_name from T_Carbn01 order by update_time""",
@@ -442,8 +442,8 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""drop table IF EXISTS t_hive7""").collect
sql(s"""drop table IF EXISTS T_Carbn01""").collect
sql(s"""create table T_Hive7(Active_status1 BOOLEAN, Item_type_cd1 TINYINT,
Qty_day_avg1 SMALLINT, Qty_total1 INT, Sell_price1 BIGINT, Sell_pricep1 FLOAT,
Discount_price1 DOUBLE , Profit1 DECIMAL(3,2), Item_code1 STRING, Item_name1
VARCHAR(50), Outlet_name1 CHAR(100), Update_time TIMESTAMP, Create_date
DATE)row format delimited fields terminated by ',' collection items terminated
by '$DOLLAR'""").collect
- sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv'
overwrite into table T_Hive7""").collect
- sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' into
table T_Hive7""").collect
+ sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive1_hive0.csv'
overwrite into table T_Hive7""").collect
+ sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive1_hive3.csv'
into table T_Hive7""").collect
sql(s"""create table T_Carbn01(Active_status String,Item_type_cd
INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep
DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name
String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY
'org.apache.carbondata.format'""").collect
sql(s"""insert into T_Carbn01 select * from T_Hive7""").collect
checkAnswer(s"""select
active_status,item_type_cd,qty_day_avg,qty_total,sell_price,sell_pricep,discount_price,
profit,item_code,item_name from T_Carbn01 order by update_time""",
@@ -484,7 +484,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""create table T_Carbn01(Active_status String,Item_type_cd
INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep
DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name
String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY
'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO
table T_Carbn01 options ('DELIMITER'=',',
'QUOTECHAR'='\','FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
sql(s"""create table T_Hive_1(Active_status String,Item_type_cd
INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep
DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name
String,Outlet_name String,Update_time TIMESTAMP,Create_date String) row format
delimited fields terminated by ',' collection items terminated by
'\n'""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO
table T_Hive_1""").collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1_hive6.csv'
INTO table T_Hive_1""").collect
sql(s"""insert into T_Carbn01 select * from T_Hive_1""").collect
checkAnswer(s"""select count(*) from T_Carbn01""",
Seq(Row(20)), "DataLoadingTestCase-Insert_Func_027")
@@ -520,7 +520,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""drop table IF EXISTS t_hive5""").collect
sql(s"""drop table IF EXISTS T_Carbn01""").collect
sql(s"""create table T_Hive5(Item_code STRING, Profit DECIMAL(3,2))row
format delimited fields terminated by ',' collection items terminated by
'$DOLLAR'""").collect
- sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive5.csv'
overwrite into table T_Hive5""").collect
+ sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive5_hive7.csv'
overwrite into table T_Hive5""").collect
sql(s"""create table T_Carbn01(Active_status String,Item_type_cd
INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep
DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name
String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY
'org.apache.carbondata.format'""").collect
sql(s"""insert into T_Carbn01 select * from T_Hive1 x where exists (select
* from T_Hive5 y where x.Item_code= y.Item_code) """).collect
checkAnswer(s"""select
active_status,item_type_cd,qty_day_avg,qty_total,sell_price,sell_pricep,discount_price,
profit,item_code,item_name from T_Carbn01 order by update_time""",
@@ -534,7 +534,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""drop table if exists t_hive4""").collect
sql(s"""drop table IF EXISTS T_Carbn01""").collect
sql(s"""create table T_Hive4(Item_code STRING, Item_name VARCHAR(50))row
format delimited fields terminated by ','""").collect
- sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive4.csv'
overwrite into table T_Hive4""").collect
+ sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive4_hive8.csv'
overwrite into table T_Hive4""").collect
sql(s"""create table T_Carbn01(Active_status String,Item_type_cd
INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep
DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name
String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY
'org.apache.carbondata.format'""").collect
sql(s"""insert into T_Carbn01 select * from T_Hive1 a where a.Item_code in
(select b.item_code from T_Hive4 b)""").collect
checkAnswer(s"""select
active_status,item_type_cd,qty_day_avg,qty_total,sell_price,sell_pricep,discount_price,
profit,item_code,item_name from T_Carbn01 order by update_time""",
@@ -657,7 +657,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO
table T_Carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\',
'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
sql(s"""drop table if exists T_Hive1""").collect
sql(s"""create table T_Hive1(Active_status String,Item_type_cd
INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep
DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name
String,Outlet_name String,Update_time TIMESTAMP,Create_date String) row format
delimited fields terminated by ',' collection items terminated by
'\n'""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO
table T_Hive1""").collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1_hive9.csv'
INTO table T_Hive1""").collect
sql(s"""Insert into T_Carbn01 select * from T_Hive1""").collect
sql(s"""delete from table T_Carbn01 where segment.id in (0)""").collect
sql(s"""select count(*) from T_Carbn01""").collect
@@ -696,7 +696,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""create table T_Hive14(Item_code STRING, Profit DECIMAL(3,2))
partitioned by (Qty_total INT, Item_type_cd TINYINT) row format delimited
fields terminated by ',' collection items terminated by '$DOLLAR'""").collect
sql(s"""drop table IF EXISTS T_Carbn014""").collect
sql(s"""create table T_Carbn014(Item_code STRING, Profit DECIMAL(3,2),
Qty_total INT, Item_type_cd INT) STORED BY
'org.apache.carbondata.format'""").collect
- sql(s"""load data INPATH '$resourcesPath/Data/InsertData/T_Hive14.csv'
overwrite into table T_Hive14 partition(Qty_total=100,
Item_type_cd=2)""").collect
+ sql(s"""load data INPATH
'$resourcesPath/Data/InsertData/T_Hive14_hive1.csv' overwrite into table
T_Hive14 partition(Qty_total=100, Item_type_cd=2)""").collect
sql(s"""insert into T_carbn014 select * from T_Hive14 where Qty_total
=100""").collect
checkAnswer(s"""select item_code, profit from T_Carbn014 order by
item_code, profit""",
Seq(Row("BE3423ee",4.99),Row("BE3423ee",4.99),Row("SE3423ee",4.99),Row("SE3423ee",4.99),Row("SE3423ee",4.99),Row("SE3423ee",4.99)),
"DataLoadingTestCase-Insert_Func_109")
@@ -790,7 +790,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
//Check for insert into carbon table with select from Hive table where only Measures columns are present.
test("Insert_Func_066", Include) {
sql(s"""create table Measures_Dataload_H (Item_code STRING, Qty int)row format delimited fields terminated by ',' LINES TERMINATED BY '\n'""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/vardhandaterestruct.csv' INTO TABLE Measures_Dataload_H""").collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/vardhandaterestruct_hive1.csv' INTO TABLE Measures_Dataload_H""").collect
sql(s"""create table Measures_Dataload_C (Item_code STRING, Qty int)stored by 'org.apache.carbondata.format'""").collect
sql(s"""insert into Measures_Dataload_C select * from Measures_Dataload_H""").collect
checkAnswer(s"""select count(*) from Measures_Dataload_C""",
@@ -819,8 +819,8 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
//Check for insert into carbon table with select statement having logical operators
test("Insert_Func_043", Include) {
sql(s"""create table Logical_Dataload_H (Item_code STRING, Qty int)row format delimited fields terminated by ',' LINES TERMINATED BY '\n'""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures.csv' INTO TABLE Logical_Dataload_H""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures.csv' INTO TABLE Logical_Dataload_H""").collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures_hive2.csv' INTO TABLE Logical_Dataload_H""").collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures_hive3.csv' INTO TABLE Logical_Dataload_H""").collect
sql(s"""create table Logical_Dataload_C (Item_code STRING, Qty int)stored by 'org.apache.carbondata.format'""").collect
sql(s"""insert into Logical_Dataload_C select * from Logical_Dataload_H where Item_Code != 'D' and Qty < 40""").collect
checkAnswer(s"""select count(*) from Logical_Dataload_C""",
@@ -833,7 +833,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
//Check that select query fetches the correct data after doing insert and load.
test("Insert_Func_073", Include) {
sql(s"""create table Dataload_H (Item_code STRING, Qty int)row format delimited fields terminated by ',' LINES TERMINATED BY '\n'""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures.csv' INTO TABLE Dataload_H""").collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures_hive4.csv' INTO TABLE Dataload_H""").collect
sql(s"""create table Dataload_C (Item_code STRING, Qty int)stored by 'org.apache.carbondata.format'""").collect
@@ -849,7 +849,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
test("Insert_Func_111", Include) {
sql(s"""create database insert1""").collect
sql(s"""create table insert1.DiffDB_Dataload_H(Item_code STRING, Qty
int)row format delimited fields terminated by ',' LINES TERMINATED BY
'\n'""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures.csv' INTO
TABLE insert1.DiffDB_Dataload_H""").collect
+ sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/InsertData/Measures_hive5.csv' INTO TABLE
insert1.DiffDB_Dataload_H""").collect
sql(s"""create database insert2""").collect
sql(s"""create table insert2.DiffDB_Dataload_C(Item_code STRING, Qty
int)stored by 'org.apache.carbondata.format'""").collect
sql(s"""insert into insert2.DiffDB_Dataload_C select * from
insert1.DiffDB_Dataload_H""").collect
@@ -863,7 +863,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
//Check for insert into carbon table with select from Hive table where only Dimension columns are present.
ignore("Insert_Func_065", Include) {
sql(s"""create table Dimension_Dataload_H (Item_code STRING)row format delimited fields terminated by ',' LINES TERMINATED BY '\n'""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures.csv' INTO TABLE Dimension_Dataload_H""").collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures_hive1.csv' INTO TABLE Dimension_Dataload_H""").collect
sql(s"""create table Dimension_Dataload_C (Item_code STRING)stored by 'org.apache.carbondata.format'""").collect
sql(s"""insert into Dimension_Dataload_C select * from Dimension_Dataload_H""").collect
checkAnswer(s"""select count(*) from Dimension_Dataload_C""",
@@ -1418,18 +1418,16 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
test("Insert_Func_023_01", Include) {
dropTable("Norecords_Dataload_C")
dropTable("Norecords_Dataload_H")
- intercept[Exception] {
- sql(s"""create table Norecords_Dataload_H (Item_code STRING, Qty int)stored by 'org.apache.carbondata.format'""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures.csv' INTO TABLE Norecords_Dataload_H OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='Item_code,Qty')""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures.csv' INTO TABLE Norecords_Dataload_H OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='Item_code,Qty')""").collect
- sql(s"""create table Norecords_Dataload_C (Item_code STRING, Qty int)stored by 'org.apache.carbondata.format'""").collect
- sql(s"""delete from table Norecords_Dataload_H where segment.id in (0,1)""").collect
- sql(s"""insert into Norecords_Dataload_C select * from Norecords_Dataload_H""").collect
- checkAnswer(s"""select count(*) from Norecords_Dataload_C""",
- Seq(Row(0)), "DataLoadingTestCase-Insert_Func_023_01")
- }
- dropTable("Norecords_Dataload_C")
- dropTable("Norecords_Dataload_H")
+ sql(s"""create table Norecords_Dataload_H (Item_code STRING, Qty int)stored by 'org.apache.carbondata.format'""").collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures.csv' INTO TABLE Norecords_Dataload_H OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='Item_code,Qty')""").collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/Measures.csv' INTO TABLE Norecords_Dataload_H OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='Item_code,Qty')""").collect
+ sql(s"""create table Norecords_Dataload_C (Item_code STRING, Qty int)stored by 'org.apache.carbondata.format'""").collect
+ sql(s"""delete from table Norecords_Dataload_H where segment.id in (0,1)""").collect
+ sql(s"""insert into Norecords_Dataload_C select * from Norecords_Dataload_H""").collect
+ checkAnswer(s"""select count(*) from Norecords_Dataload_C""",
+ Seq(Row(0)), "DataLoadingTestCase-Insert_Func_023_01")
+ dropTable("Norecords_Dataload_C")
+ dropTable("Norecords_Dataload_H")
}
@@ -1458,7 +1456,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""drop database if exists insertInto CASCADE""").collect
sql(s"""create database insertInto""").collect
sql(s"""create table insertInto.Norecords_Dataload_H (Item_code STRING, Qty
int)row format delimited fields terminated by ',' LINES TERMINATED BY
'\n'""").collect
- sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/InsertData/vardhandaterestruct.csv' INTO TABLE
insertInto.Norecords_Dataload_H""").collect
+ sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/InsertData/vardhandaterestruct_hive2.csv' INTO TABLE
insertInto.Norecords_Dataload_H""").collect
sql(s"""use insertInto""").collect
sql(s"""drop table if exists Norecords_Dataload_Carbon""").collect
sql(s"""create table Norecords_Dataload_Carbon (Item_code STRING, Qty
int)stored by 'org.apache.carbondata.format'""").collect
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/GlobalSortTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/GlobalSortTestCase.scala
index 36b7b6e..2c3ebca 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/GlobalSortTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/GlobalSortTestCase.scala
@@ -19,19 +19,23 @@
package org.apache.carbondata.cluster.sdv.generated
import org.apache.spark.sql.common.util._
-import org.scalatest.BeforeAndAfterAll
+import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
/**
* Test Class for globalsort1TestCase to verify all scenerios
*/
-class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll {
+class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAndAfterEach{
override def beforeAll {
sql(s"""drop table if exists uniqdata11""").collect
sql(s"""drop table if exists uniqdataquery1""").collect
}
+ override def beforeEach(): Unit = {
+ sql(s"""drop table if exists uniqdata11""").collect
+ sql(s"""drop table if exists uniqdataquery1""").collect
+ }
//Carbon-Loading-Optimizations-Global-Sort-01-01-01
test("Carbon-Loading-Optimizations-Global-Sort-01-01-01", Include) {
@@ -234,7 +238,7 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""drop table if exists uniqdata_h""").collect
sql(s"""drop table if exists uniqdata_c""").collect
sql(s"""CREATE TABLE uniqdata_h (CUST_ID int,CUST_NAME
String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1
bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2
decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1
int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','""").collect
- sql(s"""load data inpath '$resourcesPath/Data/uniqdata/2000_UniqData.csv'
into table uniqdata_h""").collect
+ sql(s"""load data inpath
'$resourcesPath/Data/uniqdata/2000_UniqData_hive2.csv' into table
uniqdata_h""").collect
sql(s"""CREATE TABLE uniqdata_c (CUST_ID int,CUST_NAME
String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1
bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2
decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1
int) STORED BY 'carbondata'""").collect
sql(s"""insert into uniqdata_c select * from uniqdata_h""").collect
@@ -618,4 +622,9 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""drop table if exists uniqdata11""").collect
sql(s"""drop table if exists uniqdataquery1""").collect
}
+
+ override def afterEach: Unit = {
+ sql(s"""drop table if exists uniqdata11""").collect
+ sql(s"""drop table if exists uniqdataquery1""").collect
+ }
}
\ No newline at end of file
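The BeforeAndAfterEach mix-in added above is the standard ScalaTest hook for per-test cleanup: beforeEach and afterEach run around every single test, so tables left behind by one failing test cannot break the next. A condensed sketch of the hooks this diff adds (the dropTables helper is an illustration, not part of the patch):

    // Runs around every test in the suite; the table names are the ones
    // used by GlobalSortTestCase. dropTables is a hypothetical helper.
    override def beforeEach(): Unit = dropTables()
    override def afterEach(): Unit = dropTables()
    private def dropTables(): Unit = {
      sql(s"""drop table if exists uniqdata11""").collect
      sql(s"""drop table if exists uniqdataquery1""").collect
    }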
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBVATestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBVATestCase.scala
index 11c705d..f0c5599 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBVATestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBVATestCase.scala
@@ -54,7 +54,7 @@ class QueriesBVATestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/Test_Data1.csv' INTO table Test_Boundary OPTIONS('DELIMITER'=',','QUOTECHAR'='', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='c1_int,c2_Bigint,c3_Decimal,c4_double,c5_string,c6_Timestamp,c7_Datatype_Desc')""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/Test_Data1.csv' INTO table Test_Boundary_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/Test_Data1_hive1.csv' INTO table Test_Boundary_hive """).collect
}
@@ -78,7 +78,7 @@ class QueriesBVATestCase extends QueryTest with BeforeAndAfterAll {
sql(s""" LOAD DATA INPATH '$resourcesPath/Data/Test_Data1.csv' INTO table Test_Boundary1 OPTIONS('DELIMITER'=',','QUOTECHAR'='', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='c1_int,c2_Bigint,c3_Decimal,c4_double,c5_string,c6_Timestamp,c7_Datatype_Desc')""").collect
- sql(s""" LOAD DATA INPATH '$resourcesPath/Data/Test_Data1.csv' INTO table Test_Boundary1_hive """).collect
+ sql(s""" LOAD DATA INPATH '$resourcesPath/Data/Test_Data1_hive2.csv' INTO table Test_Boundary1_hive """).collect
}
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBasicTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBasicTestCase.scala
index 5b3df0a..ed58e70 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBasicTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBasicTestCase.scala
@@ -52,7 +52,7 @@ class QueriesBasicTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData_hive1.csv' into table uniqdata_hive """).collect
}
@@ -71,7 +71,7 @@ class QueriesBasicTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/4000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/4000_UniqData.csv' into table uniqdata_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/4000_UniqData_hive1.csv' into table uniqdata_hive """).collect
}
@@ -89,7 +89,7 @@ class QueriesBasicTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/6000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/6000_UniqData.csv' into table uniqdata_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/6000_UniqData_hive1.csv' into table uniqdata_hive """).collect
}
@@ -100,7 +100,7 @@ class QueriesBasicTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData_hive1.csv' into table uniqdata_hive """).collect
}
@@ -111,7 +111,7 @@ class QueriesBasicTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/3000_1_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/3000_1_UniqData.csv' into table uniqdata_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/3000_1_UniqData_hive1.csv' into table uniqdata_hive """).collect
}
test("UNIQDATA_DeleteLoad1_3", Include) {
@@ -5304,7 +5304,7 @@ class QueriesBasicTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/HiveData/100_hive_test.csv' INTO TABLE Carbon_automation OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/HiveData/100_hive_test.csv' INTO TABLE Carbon_automation_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/HiveData/100_hive_test_hive1.csv' INTO TABLE Carbon_automation_hive """).collect
}
@@ -13093,7 +13093,7 @@ class QueriesBasicTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_VMALL_1_Day_DATA_2015-09-15.csv' INTO table pushupfilter options('DELIMITER'=',', 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,uuid,MAC,device_color,device_shell_color,device_name,product_name,ram,rom,cpu_clock,series,check_date,check_year,check_month,check_day,check_hour,bom,inside_name,packing_date,packing_year,packing_month,packing_day,packing_hour,customer_name,deliveryAreaId,deliveryCountry,deliveryProvince,de [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_VMALL_1_Day_DATA_2015-09-15.csv' INTO table pushupfilter_hive """).collect
+ sql(s"""insert overwrite table pushupfilter_hive select * from pushupfilter""").collect
}
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesCompactionTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesCompactionTestCase.scala
index 5fdc098..cb275d6 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesCompactionTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesCompactionTestCase.scala
@@ -47,7 +47,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive1.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -58,7 +58,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive2.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -69,7 +69,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive3.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -80,7 +80,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive4.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -91,7 +91,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive5.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -102,7 +102,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive6.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -113,7 +113,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive7.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -124,7 +124,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive8.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -135,7 +135,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive9.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -146,7 +146,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive10.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -157,7 +157,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive11.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -168,7 +168,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive12.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -179,7 +179,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive13.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -190,7 +190,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive14.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -201,7 +201,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,c [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive15.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -212,7 +212,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive16.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -223,7 +223,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive17.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -234,7 +234,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber, [...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive18.csv' INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -245,7 +245,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',',
'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE',
'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive19.csv'
INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -256,7 +256,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_INCLUDE options ('DELIMITER'=',',
'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE',
'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive20.csv'
INTO table Comp_DICTIONARY_INCLUDE_hive """).collect
}
@@ -287,7 +287,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive21.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -298,7 +298,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE',
'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive22.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -309,7 +309,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive23.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -320,7 +320,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive24.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -331,7 +331,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE',
'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive25.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -342,7 +342,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive26.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -353,7 +353,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive27.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -364,7 +364,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE',
'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive28.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -375,7 +375,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE',
'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive29.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -386,7 +386,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive30.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -397,7 +397,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE',
'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive31.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -408,7 +408,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive32.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -419,7 +419,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE',
'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive33.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -430,7 +430,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive34.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -441,7 +441,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE',
'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive35.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -452,7 +452,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive36.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -463,7 +463,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE',
'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive37.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -474,7 +474,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive38.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -485,7 +485,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE',
'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive39.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -496,7 +496,7 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table Comp_DICTIONARY_EXCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive40.csv'
INTO table Comp_DICTIONARY_EXCLUDE_hive """).collect
}
@@ -3265,8 +3265,8 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
//Comp_DICTIONARY_INCLUDE_344
test("Comp_DICTIONARY_INCLUDE_344", Include) {
- checkAnswer(s"""select
b.contractNumber,b.Latest_DAY,b.gamePointId,b.productionDate,b.deviceInformationId,b.IMEI
from Comp_DICTIONARY_INCLUDE a join Comp_DICTIONARY_INCLUDE b on
a.Latest_Day=b.Latest_Day""",
- s"""select
b.contractNumber,b.Latest_DAY,b.gamePointId,b.productionDate,b.deviceInformationId,b.IMEI
from Comp_DICTIONARY_INCLUDE_hive a join Comp_DICTIONARY_INCLUDE_hive b on
a.Latest_Day=b.Latest_Day""",
"QueriesCompactionTestCase_DICTIONARY_INCLUDE_344")
+ checkAnswer(s"""select
b.contractNumber,b.Latest_DAY,b.gamePointId,b.productionDate,b.deviceInformationId,b.IMEI
from Comp_DICTIONARY_INCLUDE a join Comp_DICTIONARY_INCLUDE b on
a.Latest_Day=b.Latest_Day limit 5""",
+ s"""select
b.contractNumber,b.Latest_DAY,b.gamePointId,b.productionDate,b.deviceInformationId,b.IMEI
from Comp_DICTIONARY_INCLUDE_hive a join Comp_DICTIONARY_INCLUDE_hive b on
a.Latest_Day=b.Latest_Day limit 5""",
"QueriesCompactionTestCase_DICTIONARY_INCLUDE_344")
}
@@ -3274,8 +3274,8 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
//Comp_DICTIONARY_INCLUDE_345
test("Comp_DICTIONARY_INCLUDE_345", Include) {
- checkAnswer(s"""select
b.contractNumber,b.Latest_DAY,b.gamePointId,b.productionDate,b.deviceInformationId,b.IMEI
from Comp_DICTIONARY_INCLUDE a join Comp_DICTIONARY_INCLUDE b on
a.contractnumber=b.contractnumber""",
- s"""select
b.contractNumber,b.Latest_DAY,b.gamePointId,b.productionDate,b.deviceInformationId,b.IMEI
from Comp_DICTIONARY_INCLUDE_hive a join Comp_DICTIONARY_INCLUDE_hive b on
a.contractnumber=b.contractnumber""",
"QueriesCompactionTestCase_DICTIONARY_INCLUDE_345")
+ checkAnswer(s"""select
b.contractNumber,b.Latest_DAY,b.gamePointId,b.productionDate,b.deviceInformationId,b.IMEI
from Comp_DICTIONARY_INCLUDE a join Comp_DICTIONARY_INCLUDE b on
a.contractnumber=b.contractnumber limit 5""",
+ s"""select
b.contractNumber,b.Latest_DAY,b.gamePointId,b.productionDate,b.deviceInformationId,b.IMEI
from Comp_DICTIONARY_INCLUDE_hive a join Comp_DICTIONARY_INCLUDE_hive b on
a.contractnumber=b.contractnumber limit 5""",
"QueriesCompactionTestCase_DICTIONARY_INCLUDE_345")
}
@@ -3283,8 +3283,8 @@ class QueriesCompactionTestCase extends QueryTest with
BeforeAndAfterAll {
//Comp_DICTIONARY_INCLUDE_346
test("Comp_DICTIONARY_INCLUDE_346", Include) {
- checkAnswer(s"""select count( contractNumber ),sum( contractNumber
),count(distinct contractNumber ),avg( contractNumber ),max( contractNumber
),min( contractNumber ),1 from Comp_DICTIONARY_INCLUDE""",
- s"""select count( contractNumber ),sum( contractNumber ),count(distinct
contractNumber ),avg( contractNumber ),max( contractNumber ),min(
contractNumber ),1 from Comp_DICTIONARY_INCLUDE_hive""",
"QueriesCompactionTestCase_DICTIONARY_INCLUDE_346")
+ checkAnswer(s"""select count( contractNumber ),sum( contractNumber
),count(distinct contractNumber ),avg( contractNumber ),max( contractNumber
),min( contractNumber ),1 from Comp_DICTIONARY_INCLUDE limit 5""",
+ s"""select count( contractNumber ),sum( contractNumber ),count(distinct
contractNumber ),avg( contractNumber ),max( contractNumber ),min(
contractNumber ),1 from Comp_DICTIONARY_INCLUDE_hive limit 5""",
"QueriesCompactionTestCase_DICTIONARY_INCLUDE_346")
}
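
Each hive load above now reads its own numbered copy of the source file
(100_olap_C20_hive12.csv through 100_olap_C20_hive40.csv), because hive's
LOAD DATA INPATH moves the file into the warehouse, so a second load from the
same path fails. A minimal sketch of how such copies could be staged before
the runs; the helper name and count are illustrative, not part of this commit:

    import java.nio.file.{Files, Paths, StandardCopyOption}

    // Stage numbered copies of a CSV so each hive LOAD DATA consumes its own.
    def stageHiveCsvCopies(resourcesPath: String, base: String, count: Int): Unit = {
      val src = Paths.get(s"$resourcesPath/Data/$base.csv")
      (1 to count).foreach { i =>
        Files.copy(src, Paths.get(s"$resourcesPath/Data/${base}_hive$i.csv"),
          StandardCopyOption.REPLACE_EXISTING)
      }
    }

    // e.g. stageHiveCsvCopies(resourcesPath, "100_olap_C20", 40)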
diff --git
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesExcludeDictionaryTestCase.scala
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesExcludeDictionaryTestCase.scala
index fcd20fd..49d5b3e 100644
---
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesExcludeDictionaryTestCase.scala
+++
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesExcludeDictionaryTestCase.scala
@@ -54,7 +54,7 @@ class QueriesExcludeDictionaryTestCase extends QueryTest with
BeforeAndAfterAll
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table TABLE_DICTIONARY_EXCLUDE options ('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table TABLE_DICTIONARY_EXCLUDE1_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive41.csv'
INTO table TABLE_DICTIONARY_EXCLUDE1_hive """).collect
}
@@ -3892,7 +3892,7 @@ class QueriesExcludeDictionaryTestCase extends QueryTest
with BeforeAndAfterAll
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/SEQ500/seq_500Records.csv'
into table smart_500_DE options('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='SID,PROBEID,INTERFACEID,GROUPID,GGSN_ID,SGSN_ID,dummy,SESSION_INDICATOR,BEGIN_TIME,BEGIN_TIME_MSEL,END_TIME,END_TIME_MSEL,PROT_CATEGORY,PROT_TYPE,L7_CARRIER_PROT,SUB_PROT_TYPE,MSISDN,IMSI,IMEI,ENCRYPT_VERSION,ROAMING_TYPE,ROAM_DIRECTION,MS_IP,SERVER_IP,MS_PORT,APN,SGSN_SIG_IP,GGSN_USER_IP,SGSN_USER_IP,MCC
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/SEQ500/seq_500Records.csv'
into table smart_500_DE_hive """).collect
+ sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/SEQ500/seq_500Records_hive1.csv' into table
smart_500_DE_hive """).collect
}
diff --git
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesIncludeDictionaryTestCase.scala
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesIncludeDictionaryTestCase.scala
index 769911c..dad731f 100644
---
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesIncludeDictionaryTestCase.scala
+++
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesIncludeDictionaryTestCase.scala
@@ -55,7 +55,7 @@ class QueriesIncludeDictionaryTestCase extends QueryTest with
BeforeAndAfterAll
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table TABLE_DICTIONARY_INCLUDE options ('DELIMITER'=',',
'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20.csv' INTO
table TABLE_DICTIONARY_INCLUDE_hive """).collect
+ sql(s"""LOAD DATA INPATH '$resourcesPath/Data/100_olap_C20_hive42.csv'
INTO table TABLE_DICTIONARY_INCLUDE_hive """).collect
}
@@ -3883,7 +3883,7 @@ class QueriesIncludeDictionaryTestCase extends QueryTest
with BeforeAndAfterAll
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/SEQ500/seq_500Records.csv'
into table smart_500_DINC options('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='SID,PROBEID,INTERFACEID,GROUPID,GGSN_ID,SGSN_ID,dummy,SESSION_INDICATOR,BEGIN_TIME,BEGIN_TIME_MSEL,END_TIME,END_TIME_MSEL,PROT_CATEGORY,PROT_TYPE,L7_CARRIER_PROT,SUB_PROT_TYPE,MSISDN,IMSI,IMEI,ENCRYPT_VERSION,ROAMING_TYPE,ROAM_DIRECTION,MS_IP,SERVER_IP,MS_PORT,APN,SGSN_SIG_IP,GGSN_USER_IP,SGSN_USER_IP,M
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/SEQ500/seq_500Records.csv'
into table smart_500_DINC_hive """).collect
+ sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/SEQ500/seq_500Records_hive2.csv' into table
smart_500_DINC_hive """).collect
}
diff --git
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesNormalTestCase.scala
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesNormalTestCase.scala
index 138dc56..0411dcb 100644
---
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesNormalTestCase.scala
+++
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesNormalTestCase.scala
@@ -287,7 +287,7 @@ class QueriesNormalTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""CREATE table cmb (Cust_UID String,year String, month String,
companyNumber String, familyadNumber String, companyAddress String, company
String, occupation String, certicardValidTime String, race String,
CerticardCity String, birthday String, VIPLevel String, ageRange String,
familyaddress String, dimension16 String, SubsidaryBank String,
AccountCreationTime String, dimension19 String, dimension20 String,
DemandDeposits double, TimeDeposits double, financial double, Treasury [...]
- sql(s"""CREATE table cmb_hive (Cust_UID String,year String, month String,
companyAddress String,companyNumber String,company String,occupation String,
certicardValidTime String,race String, CerticardCity String,birthday String,
VIPLevel String, ageRange String, familyaddress String,familyadNumber String,
dimension16 String, SubsidaryBank String, AccountCreationTime String,
dimension19 String, dimension20 String, DemandDeposits double, TimeDeposits
double, financial double, TreasuryBo [...]
+ sql(s"""CREATE table cmb_hive (Cust_UID String,year String, month String,
companyNumber String, familyadNumber String, companyAddress String, company
String, occupation String, certicardValidTime String, race String,
CerticardCity String, birthday String, VIPLevel String, ageRange String,
familyaddress String, dimension16 String, SubsidaryBank String,
AccountCreationTime String, dimension19 String, dimension20 String,
DemandDeposits double, TimeDeposits double, financial double, Tre [...]
}
@@ -297,7 +297,7 @@ class QueriesNormalTestCase extends QueryTest with
BeforeAndAfterAll {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/cmb/data.csv' INTO table
cmb OPTIONS ('DELIMITER'=',' ,
'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='Cust_UID,year,month,companyAddress,companyNumber,company,occupation,certicardValidTime,race,CerticardCity,birthday,VIPLevel,ageRange,familyaddress,familyadNumber,dimension16,SubsidaryBank,AccountCreationTime,dimension19,dimension20,DemandDeposits,TimeDeposits,financial,TreasuryBonds,fund,incomeOneyear,outcomeOneyear,insuranc
[...]
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/cmb/data.csv' INTO table
cmb_hive """).collect
+ sql(s"""insert overwrite table cmb_hive select * from cmb""").collect
}
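
For cmb_hive the fix takes a different route: instead of pointing hive at
another CSV copy, the table is filled from the already-loaded carbon table, so
no source file is consumed at all (the cmb_hive DDL above is also aligned to
the carbon table's column order, so the select * mapping is positionally
correct). The same pattern for any carbon/hive table pair, sketched with
illustrative table names:

    // Hedged sketch: populate the hive mirror from the carbon table rather
    // than from a CSV, so hive never moves or deletes the source file.
    sql("insert overwrite table some_table_hive select * from some_table").collect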
diff --git
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesRangeFilterTestCase.scala
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesRangeFilterTestCase.scala
index 4b99dbf..bac4697 100644
---
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesRangeFilterTestCase.scala
+++
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesRangeFilterTestCase.scala
@@ -49,7 +49,7 @@ test("Range_Filter_01_1", Include) {
sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata.csv' INTO TABLE
NO_DICTIONARY_CARBON OPTIONS('DELIMITER'= ',', 'QUOTECHAR'='"',
'FILEHEADER'='empno,empname,designation,doj,workgroupcategory,workgroupcategoryname,deptno,deptname,projectcode,projectjoindate,projectenddate,attendance,utilization,salary')""").collect
- sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata.csv' INTO TABLE
NO_DICTIONARY_CARBON_hive """).collect
+ sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata_hive1.csv' INTO TABLE
NO_DICTIONARY_CARBON_hive """).collect
sql(s"""CREATE TABLE NO_DICTIONARY_CARBON_6 (empno string, doj Timestamp,
workgroupcategory Int, empname String,workgroupcategoryname String, deptno Int,
deptname String, projectcode Int, projectjoindate Timestamp, projectenddate
Timestamp, designation String,attendance Int,utilization Int,salary Int) STORED
BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='empno,
empname,designation')""").collect
@@ -57,7 +57,7 @@ test("Range_Filter_01_1", Include) {
sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata.csv' INTO TABLE
NO_DICTIONARY_CARBON_6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'='"',
'FILEHEADER'='empno,empname,designation,doj,workgroupcategory,workgroupcategoryname,deptno,deptname,projectcode,projectjoindate,projectenddate,attendance,utilization,salary')""").collect
- sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata.csv' INTO TABLE
NO_DICTIONARY_CARBON_6_hive """).collect
+ sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata_hive2.csv' INTO TABLE
NO_DICTIONARY_CARBON_6_hive """).collect
sql(s"""CREATE TABLE DICTIONARY_CARBON_6 (empno string, doj Timestamp,
workgroupcategory Int, empname String,workgroupcategoryname String, deptno Int,
deptname String, projectcode Int, projectjoindate timestamp,projectenddate
Timestamp, designation String,attendance Int,utilization Int,salary Int) STORED
BY 'org.apache.carbondata.format'
TBLPROPERTIES('DICTIONARY_EXCLUDE'='empname,designation')""").collect
@@ -65,7 +65,7 @@ test("Range_Filter_01_1", Include) {
sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata.csv' INTO TABLE
DICTIONARY_CARBON_6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'='"',
'FILEHEADER'='empno,empname,designation,doj,workgroupcategory,workgroupcategoryname,deptno,deptname,projectcode,projectjoindate,projectenddate,attendance,utilization,salary')""").collect
- sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata.csv' INTO TABLE
DICTIONARY_CARBON_6_hive """).collect
+ sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata_hive3.csv' INTO TABLE
DICTIONARY_CARBON_6_hive """).collect
sql(s"""CREATE TABLE NO_DICTIONARY_CARBON_7 (empno string, doj Timestamp,
workgroupcategory Int, empname String,workgroupcategoryname String, deptno Int,
deptname String, projectcode Int, projectjoindate Timestamp, projectenddate
Timestamp, designation String,attendance Int,utilization Int,salary Int) STORED
BY 'org.apache.carbondata.format'
TBLPROPERTIES('DICTIONARY_EXCLUDE'='empno,empname,designation')""").collect
@@ -73,7 +73,7 @@ test("Range_Filter_01_1", Include) {
sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata.csv' INTO TABLE
NO_DICTIONARY_CARBON_7 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"' ,
'FILEHEADER'='empno,empname,designation,doj,workgroupcategory,workgroupcategoryname,deptno,deptname,projectcode,projectjoindate,projectenddate,attendance,utilization,salary')""").collect
- sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata.csv' INTO TABLE
NO_DICTIONARY_CARBON_7_hive """).collect
+ sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata_hive4.csv' INTO TABLE
NO_DICTIONARY_CARBON_7_hive """).collect
sql(s"""CREATE TABLE NO_DICTIONARY_CARBON_8 (empno string, doj Timestamp,
workgroupcategory Int, empname String,workgroupcategoryname String, deptno Int,
deptname String, projectcode Int, projectjoindate Timestamp, projectenddate
Timestamp, designation String,attendance Int,utilization Int,salary Int) STORED
BY 'org.apache.carbondata.format'
TBLPROPERTIES('DICTIONARY_EXCLUDE'='empno,empname,designation')""").collect
@@ -81,7 +81,7 @@ test("Range_Filter_01_1", Include) {
sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata.csv' INTO TABLE
NO_DICTIONARY_CARBON_8 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"' ,
'FILEHEADER'='empno,empname,designation,doj,workgroupcategory,workgroupcategoryname,deptno,deptname,projectcode,projectjoindate,projectenddate,attendance,utilization,salary')""").collect
- sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata.csv' INTO TABLE
NO_DICTIONARY_CARBON_8_hive """).collect
+ sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangefilterdata_hive5.csv' INTO TABLE
NO_DICTIONARY_CARBON_8_hive """).collect
sql(s"""CREATE TABLE if not exists directDictionaryTable (empno int,doj
Timestamp, salary int) STORED BY 'org.apache.carbondata.format'""").collect
@@ -89,7 +89,7 @@ test("Range_Filter_01_1", Include) {
sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangedatasample.csv' INTO TABLE
directDictionaryTable OPTIONS ('DELIMITER'= ',', 'QUOTECHAR'=
'"','FILEHEADER'='empno,doj,salary')""").collect
- sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangedatasample.csv' INTO TABLE
directDictionaryTable_hive """).collect
+ sql(s"""LOAD DATA INPATH
'$resourcesPath/Data/RangeFilter/rangedatasample_hive1.csv' INTO TABLE
directDictionaryTable_hive """).collect
}
diff --git
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesSparkBlockDistTestCase.scala
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesSparkBlockDistTestCase.scala
index 13c4918..a5004eb 100644
---
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesSparkBlockDistTestCase.scala
+++
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesSparkBlockDistTestCase.scala
@@ -34,10 +34,10 @@ class QueriesSparkBlockDistTestCase extends QueryTest with
BeforeAndAfterAll {
sql("drop table if exists flow_carbon_256b_hive")
sql(s"""CREATE TABLE IF NOT EXISTS flow_carbon_256b ( txn_dte String,
dt String, txn_bk String, txn_br String, own_bk String, own_br
String, opp_bk String, bus_opr_cde String, opt_prd_cde String, cus_no
String, cus_ac String, opp_ac_nme String, opp_ac String, bv_no
String, aco_ac String, ac_dte String, txn_cnt int,
jrn_par int, mfm_jrn_no String, cbn_jrn_no String, ibs_jrn_no
String, vch_no [...]
- sql(s"""CREATE TABLE IF NOT EXISTS flow_carbon_256b_hive ( txn_dte
String, dt String, txn_bk String, txn_br String, own_bk String,
own_br String, opp_bk String, bus_opr_cde String, opt_prd_cde String,
cus_no String, cus_ac String, opp_ac_nme String, opp_ac String,
bv_no String, aco_ac String, ac_dte String, txn_cnt int,
jrn_par int, mfm_jrn_no String, cbn_jrn_no String,
ibs_jrn_no String, vc [...]
+ sql(s"""CREATE TABLE IF NOT EXISTS flow_carbon_256b_hive ( txn_dte
String, dt String, txn_bk String, txn_br String, own_bk String,
own_br String, opp_bk String, bus_opr_cde String, opt_prd_cde String,
cus_no String, cus_ac String, opp_ac_nme String, opp_ac String,
bv_no String, aco_ac String, ac_dte String, txn_cnt int,
jrn_par int, mfm_jrn_no String, cbn_jrn_no String,
ibs_jrn_no String, vc [...]
sql(s"""LOAD DATA inpath '$resourcesPath/Data/cmb/data.csv' into table
flow_carbon_256b options('BAD_RECORDS_ACTION'='FORCE','DELIMITER'=',',
'QUOTECHAR'='"','FILEHEADER'='txn_dte,dt,txn_bk,txn_br,own_bk,own_br,opp_bk,bus_opr_cde,opt_prd_cde,cus_no,cus_ac,opp_ac_nme,opp_ac,bv_no,aco_ac,ac_dte,txn_cnt,jrn_par,mfm_jrn_no,cbn_jrn_no,ibs_jrn_no,vch_no,vch_seq,srv_cde,bus_cd_no,id_flg,bv_cde,txn_time,txn_tlr,ety_tlr,ety_bk,ety_br,bus_pss_no,chk_flg,chk_tlr,chk_jrn_no,bus_sys_no,txn_sub_
[...]
- sql(s"""LOAD DATA inpath '$resourcesPath/Data/cmb/data.csv' into table
flow_carbon_256b_hive""").collect
+ sql(s"""insert overwrite table flow_carbon_256b_hive select * from
flow_carbon_256b""").collect
}
diff --git
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/TableCommentAlterTableTestCase.scala
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/TableCommentAlterTableTestCase.scala
index 8bd53ae..26a6882 100644
---
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/TableCommentAlterTableTestCase.scala
+++
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/TableCommentAlterTableTestCase.scala
@@ -74,10 +74,9 @@ class TableCommentAlterTableTestCase extends QueryTest with
BeforeAndAfterAll {
}
//Check create table with comment after stored by clause
+ //In Spark-2.3 a comment after the STORED BY clause is accepted; earlier
Spark versions may reject it.
test("TableCommentAlterTable_001_06", Include) {
- intercept[AnalysisException] {
- sql("create table table_comment_afterstoredby (id int, name string)
STORED BY 'carbondata' comment 'This is table comment'")
- }
+ sql("create table table_comment_afterstoredby (id int, name string) STORED
BY 'carbondata' comment 'This is table comment'")
}
//Check the comment by "describe formatted"
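
Since the AnalysisException expectation is dropped outright, the test now
passes only where the comment-after-STORED-BY syntax parses. A hedged
alternative would keep the old assertion behind a version check (sketch only;
this commit simply removes the intercept):

    // Guard the assertion on the running Spark version instead of removing it.
    val ddl = "create table table_comment_afterstoredby (id int, name string) " +
      "STORED BY 'carbondata' comment 'This is table comment'"
    if (org.apache.spark.SPARK_VERSION.startsWith("2.3")) {
      sql(ddl)
    } else {
      intercept[AnalysisException] {
        sql(ddl)
      }
    }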
diff --git
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
index 7448e95..7e25dcd 100644
---
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
+++
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
@@ -29,44 +29,44 @@ import
org.apache.carbondata.spark.testsuite.localdictionary.LoadTableWithLocalD
*/
class SDVSuites extends Suites with BeforeAndAfterAll {
- val suites = new AlterTableTestCase ::
- new BadRecordTestCase ::
- new BatchSortLoad1TestCase ::
- new BatchSortLoad2TestCase ::
- new BatchSortQueryTestCase ::
- new ColumndictTestCase ::
- new CreateTableAsSelectTestCase ::
- new DataLoadingTestCase ::
- new DataLoadingV3TestCase ::
- new InvertedindexTestCase ::
- new OffheapQuery1TestCase ::
- new OffheapQuery2TestCase ::
- new OffheapSort1TestCase ::
- new OffheapSort2TestCase ::
- new PartitionTestCase ::
- new QueriesBasicTestCase ::
- new QueriesBVATestCase ::
- new QueriesCompactionTestCase ::
- new QueriesExcludeDictionaryTestCase ::
- new QueriesIncludeDictionaryTestCase ::
- new QueriesNormalTestCase ::
- new QueriesRangeFilterTestCase ::
- new QueriesSparkBlockDistTestCase ::
- new ShowLoadsTestCase ::
- new SinglepassTestCase ::
- new SortColumnTestCase ::
- new TableCommentAlterTableTestCase ::
- new TimestamptypesTestCase ::
- new V3offheapvectorTestCase ::
- new StandardPartitionTestCase ::
- new Vector1TestCase ::
- new Vector2TestCase ::
- new PreAggregateTestCase ::
- new TimeSeriesPreAggregateTestCase ::
- new TestPartitionWithGlobalSort ::
- new PartitionWithPreAggregateTestCase ::
- new CreateTableWithLocalDictionaryTestCase ::
- new LoadTableWithLocalDictionaryTestCase :: Nil
+ val suites = new AlterTableTestCase ::
+ new BadRecordTestCase ::
+ new BatchSortLoad1TestCase ::
+ new BatchSortLoad2TestCase ::
+ new BatchSortQueryTestCase ::
+ new ColumndictTestCase ::
+ new CreateTableAsSelectTestCase ::
+ new DataLoadingTestCase ::
+ new DataLoadingV3TestCase ::
+ new InvertedindexTestCase ::
+ new OffheapQuery1TestCase ::
+ new OffheapQuery2TestCase ::
+ new OffheapSort1TestCase ::
+ new OffheapSort2TestCase ::
+ new PartitionTestCase ::
+ new QueriesBasicTestCase ::
+ new QueriesBVATestCase ::
+ new QueriesCompactionTestCase ::
+ new QueriesExcludeDictionaryTestCase ::
+ new QueriesIncludeDictionaryTestCase ::
+ new QueriesNormalTestCase ::
+ new QueriesRangeFilterTestCase ::
+ new QueriesSparkBlockDistTestCase ::
+ new ShowLoadsTestCase ::
+ new SinglepassTestCase ::
+ new SortColumnTestCase ::
+ new TableCommentAlterTableTestCase ::
+ new TimestamptypesTestCase ::
+ new V3offheapvectorTestCase ::
+ new StandardPartitionTestCase ::
+ new Vector1TestCase ::
+ new Vector2TestCase ::
+ new PreAggregateTestCase ::
+ new TimeSeriesPreAggregateTestCase ::
+ new TestPartitionWithGlobalSort ::
+ new PartitionWithPreAggregateTestCase ::
+ new CreateTableWithLocalDictionaryTestCase ::
+ new LoadTableWithLocalDictionaryTestCase :: Nil
override val nestedSuites = suites.toIndexedSeq
@@ -82,17 +82,17 @@ class SDVSuites extends Suites with BeforeAndAfterAll {
*/
class SDVSuites1 extends Suites with BeforeAndAfterAll {
- val suites = new BadRecordTestCase ::
- new ComplexDataTypeTestCase ::
- new BatchSortLoad1TestCase ::
- new BatchSortQueryTestCase ::
- new DataLoadingTestCase ::
- new OffheapSort2TestCase ::
- new PartitionTestCase ::
- new QueriesBasicTestCase ::
- new BatchSortLoad3TestCase ::
- new GlobalSortTestCase ::
- new MergeIndexTestCase :: Nil
+ val suites = new BadRecordTestCase ::
+ new ComplexDataTypeTestCase ::
+ new BatchSortLoad1TestCase ::
+ new BatchSortQueryTestCase ::
+ new DataLoadingTestCase ::
+ new OffheapSort2TestCase ::
+ new PartitionTestCase ::
+ new QueriesBasicTestCase ::
+ new BatchSortLoad3TestCase ::
+ new GlobalSortTestCase ::
+ new MergeIndexTestCase :: Nil
override val nestedSuites = suites.toIndexedSeq
@@ -108,10 +108,9 @@ class SDVSuites1 extends Suites with BeforeAndAfterAll {
*/
class SDVSuites2 extends Suites with BeforeAndAfterAll {
- val suites = new QueriesBVATestCase ::
- new QueriesCompactionTestCase ::
- new QueriesExcludeDictionaryTestCase ::
- new DataLoadingIUDTestCase :: Nil
+ val suites = new QueriesBVATestCase ::
+ new QueriesExcludeDictionaryTestCase ::
+ new DataLoadingIUDTestCase :: Nil
override val nestedSuites = suites.toIndexedSeq
@@ -127,39 +126,7 @@ class SDVSuites2 extends Suites with BeforeAndAfterAll {
*/
class SDVSuites3 extends Suites with BeforeAndAfterAll {
- val suites = new AlterTableTestCase ::
- new BatchSortLoad2TestCase ::
- new BucketingTestCase ::
- new CreateTableAsSelectTestCase ::
- new InvertedindexTestCase ::
- new OffheapQuery1TestCase ::
- new OffheapQuery2TestCase ::
- new OffheapSort1TestCase ::
- new ShowLoadsTestCase ::
- new SinglepassTestCase ::
- new SortColumnTestCase ::
- new TimestamptypesTestCase ::
- new V3offheapvectorTestCase ::
- new Vector1TestCase ::
- new Vector2TestCase ::
- new QueriesNormalTestCase ::
- new ColumndictTestCase ::
- new QueriesRangeFilterTestCase ::
- new QueriesSparkBlockDistTestCase ::
- new DataLoadingV3TestCase ::
- new QueriesIncludeDictionaryTestCase ::
- new TestRegisterCarbonTable ::
- new TableCommentAlterTableTestCase ::
- new StandardPartitionTestCase ::
- new PreAggregateTestCase ::
- new LuceneTestCase ::
- new TimeSeriesPreAggregateTestCase ::
- new TestPartitionWithGlobalSort ::
- new SDKwriterTestCase ::
- new SetParameterTestCase ::
- new PartitionWithPreAggregateTestCase ::
- new CreateTableWithLocalDictionaryTestCase ::
- new LoadTableWithLocalDictionaryTestCase :: Nil
+ val suites = new QueriesCompactionTestCase :: Nil
override val nestedSuites = suites.toIndexedSeq
@@ -171,13 +138,43 @@ class SDVSuites3 extends Suites with BeforeAndAfterAll {
}
/**
- * Suite class for compatabiity tests
+ * Suite class for all tests.
*/
class SDVSuites4 extends Suites with BeforeAndAfterAll {
- val suites = new CreateTableUsingSparkCarbonFileFormatTestCase ::
- new SparkCarbonDataSourceTestCase ::
- new CarbonV1toV3CompatabilityTestCase :: Nil
+ val suites = new AlterTableTestCase ::
+ new BatchSortLoad2TestCase ::
+ new BucketingTestCase ::
+ new CreateTableAsSelectTestCase ::
+ new InvertedindexTestCase ::
+ new OffheapQuery1TestCase ::
+ new OffheapQuery2TestCase ::
+ new OffheapSort1TestCase ::
+ new ShowLoadsTestCase ::
+ new SinglepassTestCase ::
+ new SortColumnTestCase ::
+ new TimestamptypesTestCase ::
+ new V3offheapvectorTestCase ::
+ new Vector1TestCase ::
+ new Vector2TestCase ::
+ new QueriesNormalTestCase ::
+ new ColumndictTestCase ::
+ new QueriesRangeFilterTestCase ::
+ new QueriesSparkBlockDistTestCase ::
+ new DataLoadingV3TestCase ::
+ new QueriesIncludeDictionaryTestCase ::
+ new TestRegisterCarbonTable ::
+ new TableCommentAlterTableTestCase ::
+ new StandardPartitionTestCase ::
+ new PreAggregateTestCase ::
+ new LuceneTestCase ::
+ new TimeSeriesPreAggregateTestCase ::
+ new TestPartitionWithGlobalSort ::
+ new SDKwriterTestCase ::
+ new SetParameterTestCase ::
+ new PartitionWithPreAggregateTestCase ::
+ new CreateTableWithLocalDictionaryTestCase ::
+ new LoadTableWithLocalDictionaryTestCase :: Nil
override val nestedSuites = suites.toIndexedSeq
@@ -189,11 +186,29 @@ class SDVSuites4 extends Suites with BeforeAndAfterAll {
}
/**
- * Suite class for presto tests
+ * Suite class for compatibility tests
*/
class SDVSuites5 extends Suites with BeforeAndAfterAll {
- val suites = new PrestoSampleTestCase :: Nil
+ val suites = new CreateTableUsingSparkCarbonFileFormatTestCase ::
+ new SparkCarbonDataSourceTestCase ::
+ new CarbonV1toV3CompatabilityTestCase :: Nil
+
+ override val nestedSuites = suites.toIndexedSeq
+
+ override protected def afterAll() = {
+ println("---------------- Stopping spark -----------------")
+ TestQueryExecutor.INSTANCE.stop()
+ println("---------------- Stopped spark -----------------")
+ }
+}
+
+/**
+ * Suite class for presto tests
+ */
+class SDVSuites6 extends Suites with BeforeAndAfterAll {
+
+ val suites = new PrestoSampleTestCase :: Nil
override val nestedSuites = suites.toIndexedSeq
diff --git
a/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/PlanTest.scala
b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/PlanTest.scala
index cdd415f..78e157e 100644
---
a/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/PlanTest.scala
+++
b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/PlanTest.scala
@@ -28,7 +28,7 @@ class PlanTest extends CarbonFunSuite {
/** Fails the test if the two expressions do not match */
protected def compareExpressions(e1: Expression, e2: Expression): Unit = {
- comparePlans(Filter(e1, OneRowRelation), Filter(e2, OneRowRelation))
+ comparePlans(Filter(e1, OneRowRelation.apply()), Filter(e2,
OneRowRelation.apply()))
}
/** Fails the test if the two plans do not match */
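
Background for the OneRowRelation change: in Spark 2.3, OneRowRelation became
a zero-argument case class (previously a case object), so the bare singleton
reference no longer compiles and an instance must be constructed explicitly.
A minimal illustration:

    import org.apache.spark.sql.catalyst.plans.logical.OneRowRelation

    // Spark 2.3+: construct an instance; OneRowRelation() and
    // OneRowRelation.apply() are equivalent.
    val relation = OneRowRelation()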
diff --git
a/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
b/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
index 0729713..bfaa0cb 100644
---
a/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
+++
b/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
@@ -51,8 +51,8 @@ object Spark2TestQueryExecutor {
val conf = new SparkConf()
if (!TestQueryExecutor.masterUrl.startsWith("local")) {
conf.setJars(TestQueryExecutor.jars).
- set("spark.driver.memory", "6g").
- set("spark.executor.memory", "4g").
+ set("spark.driver.memory", "14g").
+ set("spark.executor.memory", "8g").
set("spark.executor.cores", "2").
set("spark.executor.instances", "2").
set("spark.cores.max", "4")
diff --git a/pom.xml b/pom.xml
index ea4371e..7a3042c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -133,7 +133,8 @@
<spark.master.url>local[2]</spark.master.url>
<hdfs.url>local</hdfs.url>
<presto.jdbc.url>localhost:8086</presto.jdbc.url>
-
<spark.hadoop.hive.metastore.uris>thrift://localhost:8086</spark.hadoop.hive.metastore.uris>
+ <!--TODO: this can be enabled when the presto tests need to be run-->
+
<!--<spark.hadoop.hive.metastore.uris>thrift://localhost:8086</spark.hadoop.hive.metastore.uris>-->
<suite.name>org.apache.carbondata.cluster.sdv.suite.SDVSuites</suite.name>
<script.exetension>.sh</script.exetension>
<carbon.hive.based.metastore>false</carbon.hive.based.metastore>