This is an automated email from the ASF dual-hosted git repository.
yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new 7c5ea90228fa [MINOR] Fix broken test in HoodieBackedTableMetadataIndexLookupTestBase (#13618)
7c5ea90228fa is described below
commit 7c5ea90228fae1f068184f6715df8d3c1c03e740
Author: Davis-Zhang-Onehouse <[email protected]>
AuthorDate: Thu Jul 24 17:20:24 2025 -0700
[MINOR] Fix broken test in HoodieBackedTableMetadataIndexLookupTestBase (#13618)
---
.../testHoodieBackedTableMetadataIndexLookup.scala | 74 ++++++++--------------
1 file changed, 26 insertions(+), 48 deletions(-)
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/feature/index/testHoodieBackedTableMetadataIndexLookup.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/feature/index/testHoodieBackedTableMetadataIndexLookup.scala
index cfa5cf807587..4b4102d6e629 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/feature/index/testHoodieBackedTableMetadataIndexLookup.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/feature/index/testHoodieBackedTableMetadataIndexLookup.scala
@@ -54,7 +54,29 @@ abstract class HoodieBackedTableMetadataIndexLookupTestBase extends HoodieSparkS
protected var hoodieBackedTableMetadata: HoodieBackedTableMetadata = _
protected var testData: Seq[Seq[Any]] = _
protected var tmpDir: File = _
-
+ private val createTableStatementProvider = () =>
+ s"""
+ |create table if not exists $tableName (
+ | id string,
+ | name string,
+ | price double,
+ | ts long
+ |) using hudi
+ | options (
+ | primaryKey ='id',
+ | type = 'cow',
+ | preCombineField = 'ts',
+ | hoodie.metadata.enable = 'true',
+ | hoodie.metadata.record.index.enable = 'true',
+ | hoodie.metadata.index.column.stats.enable = 'true',
+ | hoodie.metadata.index.secondary.enable = 'true',
+ | hoodie.metadata.record.index.min.filegroup.count = '${getNumFileIndexGroup}',
+ | hoodie.metadata.record.index.max.filegroup.count = '${getNumFileIndexGroup}',
+ | hoodie.write.table.version = '${getTableVersion}',
+ | hoodie.datasource.write.payload.class = 'org.apache.hudi.common.model.OverwriteWithLatestAvroPayload'
+ | )
+ | location '$basePath'
+ """.stripMargin
/**
* Get the table version for this test implementation
*/
@@ -86,29 +108,7 @@ abstract class HoodieBackedTableMetadataIndexLookupTestBase extends HoodieSparkS
* Ensure table exists - compensates for parent class cleanup
*/
private def ensureTableExists(): Unit = {
- spark.sql(
- s"""
- |create table if not exists $tableName (
- | id string,
- | name string,
- | price double,
- | ts long
- |) using hudi
- | options (
- | primaryKey ='id',
- | type = 'cow',
- | preCombineField = 'ts',
- | hoodie.metadata.enable = 'true',
- | hoodie.metadata.record.index.enable = 'true',
- | hoodie.metadata.index.column.stats.enable = 'true',
- | hoodie.metadata.index.secondary.enable = 'true',
- | hoodie.metadata.record.index.min.filegroup.count = '${getNumFileIndexGroup}',
- | hoodie.metadata.record.index.max.filegroup.count = '${getNumFileIndexGroup}',
- | hoodie.write.table.version = '${getTableVersion}',
- | hoodie.datasource.write.payload.class = 'org.apache.hudi.common.model.OverwriteWithLatestAvroPayload'
- | )
- | location '$basePath'
- """.stripMargin)
+ spark.sql(createTableStatementProvider.apply())
}
/**
@@ -149,29 +149,7 @@ abstract class HoodieBackedTableMetadataIndexLookupTestBase extends HoodieSparkS
spark.sql("set hoodie.embed.timeline.server=false")
// Create table with specified version
- spark.sql(
- s"""
- |create table $tableName (
- | id string,
- | name string,
- | price int,
- | ts long
- |) using hudi
- | options (
- | primaryKey ='id',
- | type = 'cow',
- | preCombineField = 'ts',
- | hoodie.metadata.enable = 'true',
- | hoodie.metadata.record.index.enable = 'true',
- | hoodie.metadata.index.column.stats.enable = 'true',
- | hoodie.metadata.index.secondary.enable = 'true',
- | hoodie.metadata.record.index.min.filegroup.count = '${getNumFileIndexGroup}',
- | hoodie.metadata.record.index.max.filegroup.count = '${getNumFileIndexGroup}',
- | hoodie.write.table.version = '${getTableVersion}',
- | hoodie.datasource.write.payload.class = 'org.apache.hudi.common.model.OverwriteWithLatestAvroPayload'
- | )
- | location '$basePath'
- """.stripMargin)
+ spark.sql(createTableStatementProvider.apply())
// Insert initial test data
spark.sql(s"insert into $tableName values('1', 'b1', 10, 1000)")
@@ -358,7 +336,7 @@ abstract class HoodieBackedTableMetadataIndexLookupTestBase extends HoodieSparkS
// Case 6: Test with different secondary index (price column)
val priceIndexName = "secondary_index_idx_price"
- val priceKeys = HoodieListData.eager(List("10", "20", "30").asJava)
+ val priceKeys = HoodieListData.eager(List("10.0", "20.0", "30.0").asJava)
val priceResult = hoodieBackedTableMetadata.readSecondaryIndexLocations(priceKeys, priceIndexName).collectAsList().asScala
assert(priceResult.size == 3, s"Should return 3 results for price secondary keys in table version ${getTableVersion}")