[CARBONDATA-2754] Fixed test cases when HiveMetastore is enabled
This closes #2518 Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/92b2070e Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/92b2070e Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/92b2070e Branch: refs/heads/branch-1.4 Commit: 92b2070ed6942838b31d961a38e7654d854d5053 Parents: 99021f6 Author: rahul <[email protected]> Authored: Tue Jul 17 19:19:27 2018 +0530 Committer: ravipesala <[email protected]> Committed: Tue Jul 31 00:10:41 2018 +0530 ---------------------------------------------------------------------- .../createTable/TestCreateExternalTable.scala | 46 ++++++++++++++------ .../iud/DeleteCarbonTableTestCase.scala | 2 +- .../carbondata/store/SparkCarbonStore.scala | 16 ++++--- .../carbondata/store/SparkCarbonStoreTest.scala | 8 ++-- .../apache/spark/util/CarbonCommandSuite.scala | 3 +- .../apache/carbondata/store/CarbonStore.java | 5 ++- .../carbondata/store/LocalCarbonStore.java | 15 ++++--- .../carbondata/store/LocalCarbonStoreTest.java | 4 +- 8 files changed, 63 insertions(+), 36 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/carbondata/blob/92b2070e/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateExternalTable.scala ---------------------------------------------------------------------- diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateExternalTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateExternalTable.scala index 3b21d0a..519089b 100644 --- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateExternalTable.scala +++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateExternalTable.scala @@ -23,6 +23,9 @@ import org.apache.spark.sql.{AnalysisException, CarbonEnv} import org.apache.spark.sql.test.util.QueryTest import org.scalatest.BeforeAndAfterAll +import org.apache.carbondata.core.constants.CarbonCommonConstants +import org.apache.carbondata.core.util.CarbonProperties + class TestCreateExternalTable extends QueryTest with BeforeAndAfterAll { var originDataPath: String = _ @@ -43,25 +46,40 @@ class TestCreateExternalTable extends QueryTest with BeforeAndAfterAll { test("create external table with existing files") { assert(new File(originDataPath).exists()) sql("DROP TABLE IF EXISTS source") + if (CarbonProperties.getInstance() + .getProperty(CarbonCommonConstants.ENABLE_HIVE_SCHEMA_META_STORE, + CarbonCommonConstants.ENABLE_HIVE_SCHEMA_META_STORE_DEFAULT).equalsIgnoreCase("false")) { - // create external table with existing files - sql( - s""" - |CREATE EXTERNAL TABLE source - |STORED BY 'carbondata' - |LOCATION '$storeLocation/origin' + // create external table with existing files + sql( + s""" + |CREATE EXTERNAL TABLE source + |STORED BY 'carbondata' + |LOCATION '$storeLocation/origin' """.stripMargin) - checkAnswer(sql("SELECT count(*) from source"), sql("SELECT count(*) from origin")) + checkAnswer(sql("SELECT count(*) from source"), sql("SELECT count(*) from origin")) - checkExistence(sql("describe formatted source"), true, storeLocation+"/origin") + checkExistence(sql("describe formatted source"), true, storeLocation + "/origin") - val carbonTable = CarbonEnv.getCarbonTable(None, "source")(sqlContext.sparkSession) - assert(carbonTable.isExternalTable) - - sql("DROP TABLE IF EXISTS source") + val carbonTable = CarbonEnv.getCarbonTable(None, "source")(sqlContext.sparkSession) + assert(carbonTable.isExternalTable) - // DROP TABLE should not delete data - assert(new File(originDataPath).exists()) + sql("DROP TABLE IF 
EXISTS source") + + // DROP TABLE should not delete data + assert(new File(originDataPath).exists()) + } + else { + intercept[Exception] { + // create external table with existing files + sql( + s""" + |CREATE EXTERNAL TABLE source + |STORED BY 'carbondata' + |LOCATION '$storeLocation/origin' + """.stripMargin) + } + } } test("create external table with empty folder") { http://git-wip-us.apache.org/repos/asf/carbondata/blob/92b2070e/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala ---------------------------------------------------------------------- diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala index de93229..8280693 100644 --- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala +++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala @@ -214,7 +214,7 @@ class DeleteCarbonTableTestCase extends QueryTest with BeforeAndAfterAll { }).length == 2) } else - assert(files.listFiles().length == 1) + assert(files.listFiles().length == 2) sql("drop table update_status_files") } http://git-wip-us.apache.org/repos/asf/carbondata/blob/92b2070e/integration/spark2/src/main/scala/org/apache/carbondata/store/SparkCarbonStore.scala ---------------------------------------------------------------------- diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/store/SparkCarbonStore.scala b/integration/spark2/src/main/scala/org/apache/carbondata/store/SparkCarbonStore.scala index 3a6adea..e33b8e2 100644 --- a/integration/spark2/src/main/scala/org/apache/carbondata/store/SparkCarbonStore.scala +++ 
b/integration/spark2/src/main/scala/org/apache/carbondata/store/SparkCarbonStore.scala @@ -24,12 +24,13 @@ import scala.collection.JavaConverters._ import org.apache.spark.{CarbonInputMetrics, SparkConf} import org.apache.spark.rpc.{Master, Worker} +import org.apache.spark.sql.{CarbonEnv, SparkSession} import org.apache.spark.sql.CarbonSession._ -import org.apache.spark.sql.SparkSession import org.apache.carbondata.common.annotations.InterfaceAudience import org.apache.carbondata.common.logging.LogServiceFactory import org.apache.carbondata.core.datastore.row.CarbonRow +import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier import org.apache.carbondata.core.metadata.schema.table.CarbonTable import org.apache.carbondata.core.scan.expression.Expression import org.apache.carbondata.core.util.CarbonProperties @@ -68,21 +69,22 @@ class SparkCarbonStore extends MetaCachedCarbonStore { @throws[IOException] override def scan( - path: String, + tableIdentifier: AbsoluteTableIdentifier, projectColumns: Array[String]): java.util.Iterator[CarbonRow] = { - require(path != null) + require(tableIdentifier != null) require(projectColumns != null) - scan(path, projectColumns, null) + scan(tableIdentifier, projectColumns, null) } @throws[IOException] override def scan( - path: String, + tableIdentifier: AbsoluteTableIdentifier, projectColumns: Array[String], filter: Expression): java.util.Iterator[CarbonRow] = { - require(path != null) + require(tableIdentifier != null) require(projectColumns != null) - val table = getTable(path) + val table = CarbonEnv + .getCarbonTable(Some(tableIdentifier.getDatabaseName), tableIdentifier.getTableName)(session) val rdd = new CarbonScanRDD[CarbonRow]( spark = session, columnProjection = new CarbonProjection(projectColumns), http://git-wip-us.apache.org/repos/asf/carbondata/blob/92b2070e/integration/spark2/src/test/scala/org/apache/carbondata/store/SparkCarbonStoreTest.scala 
---------------------------------------------------------------------- diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/store/SparkCarbonStoreTest.scala b/integration/spark2/src/test/scala/org/apache/carbondata/store/SparkCarbonStoreTest.scala index d389670..f3d2c2f 100644 --- a/integration/spark2/src/test/scala/org/apache/carbondata/store/SparkCarbonStoreTest.scala +++ b/integration/spark2/src/test/scala/org/apache/carbondata/store/SparkCarbonStoreTest.scala @@ -43,8 +43,8 @@ class SparkCarbonStoreTest extends QueryTest with BeforeAndAfterAll { } test("test CarbonStore.get, compare projection result") { - val tablePath = CarbonEnv.getCarbonTable(None, "t1")(sqlContext.sparkSession).getTablePath - val rows = store.scan(s"$tablePath", Seq("empno", "empname").toArray) + val table = CarbonEnv.getCarbonTable(None, "t1")(sqlContext.sparkSession) + val rows = store.scan(table.getAbsoluteTableIdentifier, Seq("empno", "empname").toArray) val sparkResult: Array[Row] = sql("select empno, empname from t1").collect() sparkResult.zipWithIndex.foreach { case (r: Row, i: Int) => val carbonRow = rows.next() @@ -55,11 +55,11 @@ class SparkCarbonStoreTest extends QueryTest with BeforeAndAfterAll { } test("test CarbonStore.get, compare projection and filter result") { - val tablePath = CarbonEnv.getCarbonTable(None, "t1")(sqlContext.sparkSession).getTablePath + val table = CarbonEnv.getCarbonTable(None, "t1")(sqlContext.sparkSession) val filter = new EqualToExpression( new ColumnExpression("empno", DataTypes.INT), new LiteralExpression(10, DataTypes.INT)) - val rows = store.scan(s"$tablePath", Seq("empno", "empname").toArray, filter) + val rows = store.scan(table.getAbsoluteTableIdentifier, Seq("empno", "empname").toArray, filter) val sparkResult: Array[Row] = sql("select empno, empname from t1 where empno = 10").collect() sparkResult.zipWithIndex.foreach { case (r: Row, i: Int) => val carbonRow = rows.next() 
http://git-wip-us.apache.org/repos/asf/carbondata/blob/92b2070e/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala ---------------------------------------------------------------------- diff --git a/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala b/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala index e601043..8ad28a3 100644 --- a/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala +++ b/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala @@ -21,6 +21,7 @@ import java.io.File import java.sql.Timestamp import java.util.Date +import org.apache.spark.sql.CarbonEnv import org.apache.spark.sql.common.util.Spark2QueryTest import org.scalatest.BeforeAndAfterAll @@ -172,7 +173,7 @@ class CarbonCommandSuite extends Spark2QueryTest with BeforeAndAfterAll { sql(s"drop table if exists ${tableName}") sql(s"create table ${tableName} (name String, age int) stored by 'carbondata' " + "TBLPROPERTIES('AUTO_LOAD_MERGE'='true','COMPACTION_LEVEL_THRESHOLD'='2,2')") - val carbonTable = CarbonMetadata.getInstance().getCarbonTable("default", tableName) + val carbonTable = CarbonEnv.getCarbonTable(Some("default"), tableName)(sqlContext.sparkSession) sql(s"insert into ${tableName} select 'abc1',1") sql(s"insert into ${tableName} select 'abc2',2") sql(s"insert into ${tableName} select 'abc3',3") http://git-wip-us.apache.org/repos/asf/carbondata/blob/92b2070e/store/sdk/src/main/java/org/apache/carbondata/store/CarbonStore.java ---------------------------------------------------------------------- diff --git a/store/sdk/src/main/java/org/apache/carbondata/store/CarbonStore.java b/store/sdk/src/main/java/org/apache/carbondata/store/CarbonStore.java index c6b2fb8..a8c7f58 100644 --- a/store/sdk/src/main/java/org/apache/carbondata/store/CarbonStore.java +++ b/store/sdk/src/main/java/org/apache/carbondata/store/CarbonStore.java @@ -24,6 +24,7 @@ 
import java.util.Iterator; import org.apache.carbondata.common.annotations.InterfaceAudience; import org.apache.carbondata.common.annotations.InterfaceStability; import org.apache.carbondata.core.datastore.row.CarbonRow; +import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier; import org.apache.carbondata.core.scan.expression.Expression; /** @@ -41,7 +42,7 @@ public interface CarbonStore extends Closeable { * @throws IOException if unable to read files in table path */ Iterator<CarbonRow> scan( - String path, + AbsoluteTableIdentifier tableIdentifier, String[] projectColumns) throws IOException; /** @@ -53,7 +54,7 @@ public interface CarbonStore extends Closeable { * @throws IOException if unable to read files in table path */ Iterator<CarbonRow> scan( - String path, + AbsoluteTableIdentifier tableIdentifier, String[] projectColumns, Expression filter) throws IOException; http://git-wip-us.apache.org/repos/asf/carbondata/blob/92b2070e/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java ---------------------------------------------------------------------- diff --git a/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java b/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java index daa1447..dd7f333 100644 --- a/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java +++ b/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java @@ -27,6 +27,7 @@ import org.apache.carbondata.common.annotations.InterfaceAudience; import org.apache.carbondata.common.logging.LogService; import org.apache.carbondata.common.logging.LogServiceFactory; import org.apache.carbondata.core.datastore.row.CarbonRow; +import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier; import org.apache.carbondata.core.metadata.schema.table.CarbonTable; import org.apache.carbondata.core.scan.expression.Expression; import org.apache.carbondata.hadoop.CarbonProjection; @@ -55,16 +56,18 @@ 
class LocalCarbonStore extends MetaCachedCarbonStore { LogServiceFactory.getLogService(LocalCarbonStore.class.getName()); @Override - public Iterator<CarbonRow> scan(String path, String[] projectColumns) throws IOException { - return scan(path, projectColumns, null); + public Iterator<CarbonRow> scan(AbsoluteTableIdentifier tableIdentifier, String[] projectColumns) + throws IOException { + return scan(tableIdentifier, projectColumns, null); } - @Override public Iterator<CarbonRow> scan(String path, String[] projectColumns, Expression filter) - throws IOException { - Objects.requireNonNull(path); + @Override + public Iterator<CarbonRow> scan(AbsoluteTableIdentifier tableIdentifier, String[] projectColumns, + Expression filter) throws IOException { + Objects.requireNonNull(tableIdentifier); Objects.requireNonNull(projectColumns); - CarbonTable table = getTable(path); + CarbonTable table = getTable(tableIdentifier.getTablePath()); if (table.isStreamingSink() || table.isHivePartitionTable()) { throw new UnsupportedOperationException("streaming and partition table is not supported"); } http://git-wip-us.apache.org/repos/asf/carbondata/blob/92b2070e/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java ---------------------------------------------------------------------- diff --git a/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java b/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java index c885a26..2b8c708 100644 --- a/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java +++ b/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.util.Iterator; import org.apache.carbondata.core.datastore.row.CarbonRow; +import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier; import org.apache.carbondata.core.metadata.datatype.DataTypes; import org.apache.carbondata.sdk.file.Field; import 
org.apache.carbondata.sdk.file.Schema; @@ -59,7 +60,8 @@ public class LocalCarbonStoreTest { TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true); CarbonStore store = new LocalCarbonStore(); - Iterator<CarbonRow> rows = store.scan(path, new String[]{"name, age"}, null); + Iterator<CarbonRow> rows = + store.scan(AbsoluteTableIdentifier.from(path, "", ""), new String[] { "name, age" }, null); while (rows.hasNext()) { CarbonRow row = rows.next();
