Repository: carbondata
Updated Branches:
  refs/heads/master 805a5baf3 -> 214d9eb9c
[CARBONDATA-1767] Remove dependency of Java 1.8

1. Removed the dependency on Java 1.8, so the code can be compiled with Java 1.7 as well as Java 1.8.
2. Cleaned up some test cases.

This closes #1531


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/214d9eb9
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/214d9eb9
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/214d9eb9

Branch: refs/heads/master
Commit: 214d9eb9c0835d2e092b91e617fe770a00b3fbd9
Parents: 805a5ba
Author: Jacky Li <jacky.li...@qq.com>
Authored: Sun Nov 19 11:00:43 2017 +0800
Committer: chenliang613 <chenliang...@huawei.com>
Committed: Mon Nov 20 10:36:41 2017 +0800

----------------------------------------------------------------------
 .../carbondata/core/cache/CacheProviderTest.java     |  1 +
 .../carbondata/hadoop/ft/CarbonInputMapperTest.java  | 15 ++++++++++++++-
 .../carbondata/presto/impl/CarbonTableReader.java    |  5 ++++-
 .../deleteTable/TestDeleteTableNewDDL.scala          |  1 +
 .../execution/command/CarbonCreateTableCommand.scala |  6 ++++--
 .../command/CarbonDescribeFormattedCommand.scala     |  6 +++---
 .../command/management/LoadTableCommand.scala        |  2 +-
 .../loading/csvinput/CSVInputFormatTest.java         |  3 ++-
 8 files changed, 30 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/214d9eb9/core/src/test/java/org/apache/carbondata/core/cache/CacheProviderTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/cache/CacheProviderTest.java b/core/src/test/java/org/apache/carbondata/core/cache/CacheProviderTest.java
index 37db079..38f5a7b 100644
--- a/core/src/test/java/org/apache/carbondata/core/cache/CacheProviderTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/cache/CacheProviderTest.java
@@ -88,6 +88,7 @@ public class CacheProviderTest {
       throws IOException, NoSuchFieldException, IllegalAccessException {
     // get cache provider instance
     CacheProvider cacheProvider = CacheProvider.getInstance();
+    cacheProvider.dropAllCache();
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.IS_DRIVER_INSTANCE, "true");
     Cache<TableSegmentUniqueIdentifier, SegmentTaskIndexStore> driverCache =
         cacheProvider.createCache(CacheType.DRIVER_BTREE);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/214d9eb9/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputMapperTest.java
----------------------------------------------------------------------
diff --git a/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputMapperTest.java b/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputMapperTest.java
index 068d8b3..1ed8c5c 100644
--- a/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputMapperTest.java
+++ b/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputMapperTest.java
@@ -141,7 +141,20 @@ public class CarbonInputMapperTest extends TestCase {
     return 0;
   }

-  public static class Map extends Mapper<Void, Object[], Text, Text> {
+  @Override public void tearDown() throws Exception {
+    super.tearDown();
+    CarbonProperties.getInstance()
+        .addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true");
+  }
+
+  @Override public void setUp() throws Exception {
+    super.setUp();
+    CarbonProperties.getInstance()
+        .addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "false");
+    StoreCreator.createCarbonStore();
+  }
+
+  public static class Map extends Mapper<Void, Object[], Text, Text> {

     private BufferedWriter fileWriter;
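Note on the two test changes above: CacheProvider and CarbonProperties are process-wide singletons, so state left behind by one test (a populated cache, a flipped property) leaks into the next. dropAllCache() and the setUp/tearDown pair make each test start from a known state and undo what it changed. A minimal sketch of that save-and-restore pattern, with a hypothetical GlobalConfig singleton standing in for CarbonProperties:

import java.util.Properties;

import junit.framework.TestCase;

// Hypothetical stand-in for a process-wide settings singleton such as CarbonProperties.
final class GlobalConfig {
  private static final GlobalConfig INSTANCE = new GlobalConfig();
  private final Properties props = new Properties();

  static GlobalConfig getInstance() {
    return INSTANCE;
  }

  String get(String key, String defaultValue) {
    return props.getProperty(key, defaultValue);
  }

  void set(String key, String value) {
    props.setProperty(key, value);
  }
}

public class SomeFeatureTest extends TestCase {
  private String saved;

  @Override public void setUp() throws Exception {
    super.setUp();
    // Remember the current value, then force the state this test needs.
    saved = GlobalConfig.getInstance().get("enable.query.statistics", "true");
    GlobalConfig.getInstance().set("enable.query.statistics", "false");
  }

  @Override public void tearDown() throws Exception {
    super.tearDown();
    // Put the old value back so later tests see the state they expect.
    GlobalConfig.getInstance().set("enable.query.statistics", saved);
  }
}

The patch itself restores the hard-coded default "true" rather than remembering the previous value; saving and restoring, as sketched, is the slightly more defensive variant of the same idea.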
http://git-wip-us.apache.org/repos/asf/carbondata/blob/214d9eb9/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java
index f72bb7a..ce159d6 100755
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java
@@ -287,7 +287,10 @@ public class CarbonTableReader {
   private CarbonTable parseCarbonMetadata(SchemaTableName table) {
     CarbonTable result = null;
     try {
-      CarbonTableCacheModel cache = cc.getOrDefault(table, new CarbonTableCacheModel());
+      CarbonTableCacheModel cache = cc.get(table);
+      if (cache == null) {
+        cache = new CarbonTableCacheModel();
+      }
       if (cache.isValid()) return cache.carbonTable;

       // If table is not previously cached, then:

http://git-wip-us.apache.org/repos/asf/carbondata/blob/214d9eb9/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
index b82a0af..7b51438 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
@@ -49,6 +49,7 @@ class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
   }

   test("test drop database cascade command") {
+    sql("drop database if exists testdb cascade")
     sql("create database testdb")
     sql("use testdb")
     sql("CREATE TABLE IF NOT EXISTS testtable(empno Int, empname string, utilization Int,salary Int)"

http://git-wip-us.apache.org/repos/asf/carbondata/blob/214d9eb9/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonCreateTableCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonCreateTableCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonCreateTableCommand.scala
index 8880626..11f0bc5 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonCreateTableCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonCreateTableCommand.scala
@@ -17,6 +17,8 @@

 package org.apache.spark.sql.execution.command

+import scala.collection.JavaConverters._
+
 import org.apache.spark.sql.{CarbonEnv, GetDB, Row, SparkSession}

 import org.apache.carbondata.common.logging.LogServiceFactory
@@ -50,8 +52,8 @@ case class CarbonCreateTableCommand(
     val tableInfo: TableInfo = TableNewProcessor(cm)

     // Add validation for sort scope when create table
-    val sortScope = tableInfo.getFactTable.getTableProperties
-      .getOrDefault("sort_scope", CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
+    val sortScope = tableInfo.getFactTable.getTableProperties.asScala
+      .getOrElse("sort_scope", CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
     if (!CarbonUtil.isValidSortOption(sortScope)) {
       throw new InvalidConfigurationException(
         s"Passing invalid SORT_SCOPE '$sortScope', valid SORT_SCOPE are 'NO_SORT', 'BATCH_SORT'," +
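Note on the getOrDefault removals above and below: java.util.Map.getOrDefault is a default method that only exists since Java 8, so every call site has to go when compiling against Java 1.7. In the Java code (CarbonTableReader) it becomes a plain get plus null check; in the Scala code, converting the java.util.Map with JavaConverters' asScala gives access to Scala's own getOrElse, which carries no Java 8 requirement. A minimal Java sketch of the equivalence, on a throwaway map:

import java.util.HashMap;
import java.util.Map;

public class GetOrDefaultDemo {
  public static void main(String[] args) {
    Map<String, String> props = new HashMap<String, String>();
    props.put("sort_scope", "LOCAL_SORT");

    // Java 8+ only: getOrDefault is a default method on java.util.Map.
    // String scope = props.getOrDefault("sort_scope", "NO_SORT");

    // Java 7-compatible equivalent, as in the CarbonTableReader change above.
    String scope = props.get("sort_scope");
    if (scope == null) {
      scope = "NO_SORT";
    }
    System.out.println(scope); // prints LOCAL_SORT
  }
}

The two forms differ only for a key that is present but mapped to null (getOrDefault would return that null, the rewrite returns the default), so the behavior is unchanged assuming, as these call sites appear to, that no null values are stored.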
http://git-wip-us.apache.org/repos/asf/carbondata/blob/214d9eb9/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonDescribeFormattedCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonDescribeFormattedCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonDescribeFormattedCommand.scala
index b61078b..b9ba9cb 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonDescribeFormattedCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonDescribeFormattedCommand.scala
@@ -109,8 +109,8 @@ private[sql] case class CarbonDescribeFormattedCommand(
     results ++= Seq(("CARBON Store Path: ", CarbonProperties.getStorePath, ""))
     val carbonTable = relation.carbonTable
     // Carbon table support table comment
-    val tableComment = carbonTable.getTableInfo.getFactTable.getTableProperties
-      .getOrDefault(CarbonCommonConstants.TABLE_COMMENT, "")
+    val tableComment = carbonTable.getTableInfo.getFactTable.getTableProperties.asScala
+      .getOrElse(CarbonCommonConstants.TABLE_COMMENT, "")
     results ++= Seq(("Comment: ", tableComment, ""))
     results ++= Seq(("Table Block Size : ", carbonTable.getBlockSizeInMB + " MB", ""))
     val dataIndexSize = CarbonUtil.calculateDataIndexSize(carbonTable)
@@ -123,7 +123,7 @@ private[sql] case class CarbonDescribeFormattedCommand(
         dataIndexSize.get(CarbonCommonConstants.LAST_UPDATE_TIME).toString, ""))
     }
     results ++= Seq(("SORT_SCOPE", carbonTable.getTableInfo.getFactTable
-      .getTableProperties.getOrDefault("sort_scope", CarbonCommonConstants
+      .getTableProperties.asScala.getOrElse("sort_scope", CarbonCommonConstants
       .LOAD_SORT_SCOPE_DEFAULT), CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT))
     results ++= Seq(("", "", ""), ("##Detailed Column property", "", ""))
     if (colPropStr.length() > 0) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/214d9eb9/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/LoadTableCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/LoadTableCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/LoadTableCommand.scala
index 0f4ca01..a24c408 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/LoadTableCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/LoadTableCommand.scala
@@ -116,7 +116,7 @@ case class LoadTableCommand(
     val tableProperties = table.getTableInfo.getFactTable.getTableProperties
     val optionsFinal = DataLoadingUtil.getDataLoadingOptions(carbonProperty, options)

-    optionsFinal.put("sort_scope", tableProperties.getOrDefault("sort_scope",
+    optionsFinal.put("sort_scope", tableProperties.asScala.getOrElse("sort_scope",
       carbonProperty.getProperty(CarbonLoadOptionConstants.CARBON_OPTIONS_SORT_SCOPE,
         carbonProperty.getProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
           CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT))))
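Note on the LoadTableCommand hunk above: the nested getProperty calls implement a precedence chain for sort_scope. The table-level property wins, then the session option (CARBON_OPTIONS_SORT_SCOPE), then the system-wide property (LOAD_SORT_SCOPE), then the built-in default. A sketch of the same chain where plain maps and JVM system properties stand in for CarbonProperties, and the key strings and firstNonNull helper are illustrative rather than CarbonData API:

import java.util.HashMap;
import java.util.Map;

public class SortScopeResolution {

  // Return the first non-null candidate; a Java 7-friendly way to express chained defaults.
  static String firstNonNull(String... candidates) {
    for (String c : candidates) {
      if (c != null) {
        return c;
      }
    }
    return null;
  }

  public static void main(String[] args) {
    Map<String, String> tableProperties = new HashMap<String, String>();
    Map<String, String> sessionOptions = new HashMap<String, String>();
    sessionOptions.put("carbon.options.sort.scope", "BATCH_SORT");

    // Precedence mirrors the nested getProperty calls above:
    // table property > session option > system property > built-in default.
    String sortScope = firstNonNull(
        tableProperties.get("sort_scope"),
        sessionOptions.get("carbon.options.sort.scope"),
        System.getProperty("carbon.load.sort.scope"),
        "LOCAL_SORT");
    System.out.println(sortScope); // BATCH_SORT: no table-level override is set
  }
}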
http://git-wip-us.apache.org/repos/asf/carbondata/blob/214d9eb9/processing/src/test/java/org/apache/carbondata/processing/loading/csvinput/CSVInputFormatTest.java
----------------------------------------------------------------------
diff --git a/processing/src/test/java/org/apache/carbondata/processing/loading/csvinput/CSVInputFormatTest.java b/processing/src/test/java/org/apache/carbondata/processing/loading/csvinput/CSVInputFormatTest.java
index 925701d..14c680e 100644
--- a/processing/src/test/java/org/apache/carbondata/processing/loading/csvinput/CSVInputFormatTest.java
+++ b/processing/src/test/java/org/apache/carbondata/processing/loading/csvinput/CSVInputFormatTest.java
@@ -128,6 +128,8 @@ public class CSVInputFormatTest extends TestCase {
   @Test public void testReadCSVFiles() throws Exception{
     Configuration conf = new Configuration();
     prepareConf(conf);
+    File output = new File("target/output_CSVInputFormatTest");
+    conf.set("mapreduce.cluster.local.dir", output.getCanonicalPath());
     Job job = Job.getInstance(conf, "CSVInputFormat_normal");
     job.setJarByClass(CSVInputFormatTest.class);
     job.setMapperClass(CSVCheckMapper.class);
@@ -141,7 +143,6 @@ public class CSVInputFormatTest extends TestCase {
     // FileInputFormat.addInputPath(job, new Path(inputFolder + File.separator + "data.csv.lz4"));
     // FileInputFormat.addInputPath(job, new Path(inputFolder + File.separator + "data.csv.snappy"));

-    File output = new File("target/output_CSVInputFormatTest");
     deleteOutput(output);
     FileOutputFormat.setOutputPath(job, new Path(output.getCanonicalPath()));
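Note on the CSVInputFormatTest hunk: the interesting part is not the new property but where it is set. Job.getInstance(conf, name) makes a copy of the passed Configuration, so anything set on conf after the Job exists is invisible to the job; mapreduce.cluster.local.dir therefore has to be set before the Job is created, which is why the patch moves the output path computation up. A minimal sketch (the key and paths are just examples):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class ConfBeforeJob {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // Set everything the job needs BEFORE creating it: Job.getInstance
    // copies the Configuration, so later changes to 'conf' are not seen by it.
    conf.set("mapreduce.cluster.local.dir", "/tmp/mr-local");

    Job job = Job.getInstance(conf, "demo");

    conf.set("some.other.key", "ignored-by-the-job"); // too late: the job has its own copy

    System.out.println(job.getConfiguration().get("mapreduce.cluster.local.dir")); // /tmp/mr-local
    System.out.println(job.getConfiguration().get("some.other.key"));              // null
  }
}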