[MINOR] Remove dependency on Java 1.8

This closes #1928


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/0f210c86
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/0f210c86
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/0f210c86

Branch: refs/heads/branch-1.3
Commit: 0f210c86ca3ee9f0fa845cdeaef418ed9253b6f8
Parents: e363dd1
Author: Zhang Zhichao <441586...@qq.com>
Authored: Sun Feb 4 12:54:24 2018 +0800
Committer: Venkata Ramana G <ramana.gollam...@huawei.com>
Committed: Tue Feb 27 13:17:52 2018 +0530

----------------------------------------------------------------------
 .../command/preaaggregate/CreatePreAggregateTableCommand.scala     | 2 +-
 .../apache/spark/sql/execution/datasources/CarbonFileFormat.scala  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/0f210c86/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
 
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
index c8bee62..d2acb00 100644
--- 
a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
+++ 
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
@@ -79,7 +79,7 @@ case class CreatePreAggregateTableCommand(
     )
     tableProperties.put(CarbonCommonConstants.SORT_COLUMNS, 
neworder.mkString(","))
     tableProperties.put("sort_scope", parentTable.getTableInfo.getFactTable.
-      getTableProperties.getOrDefault("sort_scope", CarbonCommonConstants
+      getTableProperties.asScala.getOrElse("sort_scope", CarbonCommonConstants
       .LOAD_SORT_SCOPE_DEFAULT))
     tableProperties
       .put(CarbonCommonConstants.TABLE_BLOCKSIZE, 
parentTable.getBlockSizeInMB.toString)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0f210c86/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/CarbonFileFormat.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/CarbonFileFormat.scala
 
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/CarbonFileFormat.scala
index 99e5732..17749c8 100644
--- 
a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/CarbonFileFormat.scala
+++ 
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/CarbonFileFormat.scala
@@ -272,7 +272,7 @@ private class CarbonOutputWriter(path: String,
       val formattedPartitions = updatedPartitions.map {case (col, value) =>
         // Only convert the static partitions to the carbon format and use it 
while loading data
         // to carbon.
-        if (staticPartition.getOrDefault(col, false)) {
+        if (staticPartition.asScala.getOrElse(col, false)) {
           (col, CarbonScalaUtil.convertToCarbonFormat(value,
             CarbonScalaUtil.convertCarbonToSparkDataType(
               table.getColumnByName(table.getTableName, col).getDataType),

Reply via email to