This is an automated email from the ASF dual-hosted git repository.
codope pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new 44cd05c03e7 [MINOR] Fix ut due to the scala compile ambiguity of Properties#putAll (#9601)
44cd05c03e7 is described below
commit 44cd05c03e7fe6dc28588c65e29a86a9d06a2d2f
Author: xuzifu666 <[email protected]>
AuthorDate: Sat Sep 2 17:50:48 2023 +0800
[MINOR] Fix ut due to the scala compile ambiguity of Properties#putAll (#9601)
Co-authored-by: xuyu <[email protected]>
---
.../org/apache/hudi/functional/RecordLevelIndexTestBase.scala | 7 ++-----
.../org/apache/hudi/functional/TestColumnStatsIndexWithSQL.scala | 6 ++----
.../scala/org/apache/hudi/functional/TestMetadataRecordIndex.scala | 6 ++----
3 files changed, 6 insertions(+), 13 deletions(-)
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/RecordLevelIndexTestBase.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/RecordLevelIndexTestBase.scala
index fcaac58e072..8e898deb537 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/RecordLevelIndexTestBase.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/RecordLevelIndexTestBase.scala
@@ -23,7 +23,7 @@ import org.apache.hudi.DataSourceWriteOptions._
import org.apache.hudi.client.SparkRDDWriteClient
import org.apache.hudi.client.common.HoodieSparkEngineContext
import org.apache.hudi.client.utils.MetadataConversionUtils
-import org.apache.hudi.common.config.HoodieMetadataConfig
+import org.apache.hudi.common.config.{HoodieMetadataConfig, TypedProperties}
import org.apache.hudi.common.model._
import org.apache.hudi.common.table.timeline.HoodieInstant
import org.apache.hudi.common.table.{HoodieTableConfig, HoodieTableMetaClient}
@@ -37,12 +37,10 @@ import org.apache.spark.sql.functions.{col, not}
import org.junit.jupiter.api.Assertions.{assertEquals, assertTrue}
import org.junit.jupiter.api._
-import java.util.Properties
import java.util.concurrent.atomic.AtomicInteger
import java.util.stream.Collectors
import scala.collection.JavaConverters._
import scala.collection.{JavaConverters, mutable}
-import scala.util.Using
class RecordLevelIndexTestBase extends HoodieSparkClientTestBase {
var spark: SparkSession = _
@@ -230,8 +228,7 @@ class RecordLevelIndexTestBase extends HoodieSparkClientTestBase {
}
  protected def getWriteConfig(hudiOpts: Map[String, String]): HoodieWriteConfig = {
- val props = new Properties()
- props.putAll(JavaConverters.mapAsJavaMapConverter(hudiOpts).asJava)
+    val props = TypedProperties.fromMap(JavaConverters.mapAsJavaMapConverter(hudiOpts).asJava)
HoodieWriteConfig.newBuilder()
.withProps(props)
.withPath(basePath)
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestColumnStatsIndexWithSQL.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestColumnStatsIndexWithSQL.scala
index 1bb35bc150c..bb0c0065a91 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestColumnStatsIndexWithSQL.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestColumnStatsIndexWithSQL.scala
@@ -22,7 +22,7 @@ import org.apache.hudi.DataSourceWriteOptions.{DELETE_OPERATION_OPT_VAL, PRECOMB
import org.apache.hudi.client.SparkRDDWriteClient
import org.apache.hudi.client.common.HoodieSparkEngineContext
import org.apache.hudi.client.utils.MetadataConversionUtils
-import org.apache.hudi.common.config.HoodieMetadataConfig
+import org.apache.hudi.common.config.{HoodieMetadataConfig, TypedProperties}
import org.apache.hudi.common.fs.FSUtils
 import org.apache.hudi.common.model.{HoodieCommitMetadata, HoodieTableType, WriteOperationType}
import org.apache.hudi.common.table.HoodieTableConfig
@@ -40,7 +40,6 @@ import org.junit.jupiter.api.Assertions.{assertEquals, assertFalse, assertTrue}
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.MethodSource
-import java.util.Properties
import scala.collection.JavaConverters
 import scala.jdk.CollectionConverters.{asScalaIteratorConverter, collectionAsScalaIterableConverter}
@@ -299,8 +298,7 @@ class TestColumnStatsIndexWithSQL extends ColumnStatIndexTestBase {
}
  protected def getWriteConfig(hudiOpts: Map[String, String]): HoodieWriteConfig = {
- val props = new Properties()
- props.putAll(JavaConverters.mapAsJavaMapConverter(hudiOpts).asJava)
+    val props = TypedProperties.fromMap(JavaConverters.mapAsJavaMapConverter(hudiOpts).asJava)
HoodieWriteConfig.newBuilder()
.withProps(props)
.withPath(basePath)
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestMetadataRecordIndex.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestMetadataRecordIndex.scala
index 0f716e18951..e29b2a2b0ed 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestMetadataRecordIndex.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestMetadataRecordIndex.scala
@@ -20,7 +20,7 @@ package org.apache.hudi.functional
import org.apache.hadoop.fs.Path
import org.apache.hudi.DataSourceWriteOptions._
-import org.apache.hudi.common.config.HoodieMetadataConfig
+import org.apache.hudi.common.config.{HoodieMetadataConfig, TypedProperties}
import org.apache.hudi.common.model.HoodieTableType
import org.apache.hudi.common.table.timeline.HoodieInstant
import org.apache.hudi.common.table.{HoodieTableConfig, HoodieTableMetaClient}
@@ -35,7 +35,6 @@ import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.EnumSource
import java.util.concurrent.atomic.AtomicInteger
-import java.util.Properties
import scala.collection.JavaConverters._
import scala.collection.mutable
@@ -158,8 +157,7 @@ class TestMetadataRecordIndex extends HoodieSparkClientTestBase {
}
  private def getWriteConfig(hudiOpts: Map[String, String]): HoodieWriteConfig = {
- val props = new Properties()
- props.putAll(hudiOpts.asJava)
+ val props = TypedProperties.fromMap(hudiOpts.asJava)
HoodieWriteConfig.newBuilder()
.withProps(props)
.withPath(basePath)