This is an automated email from the ASF dual-hosted git repository.

etudenhoefner pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/iceberg.git


The following commit(s) were added to refs/heads/main by this push:
     new 65869bfad8 Spark 3.4, 3.5: Order results to fix flakiness with remote planning (#15725)
65869bfad8 is described below

commit 65869bfad87bc2ef8c3eaecdd02bc3a1506460a5
Author: Russell Spitzer <[email protected]>
AuthorDate: Mon Mar 23 01:45:32 2026 -0500

    Spark 3.4, 3.5: Order results to fix flakiness with remote planning (#15725)
---
 .../spark/src/test/java/org/apache/iceberg/spark/sql/TestSelect.java   | 3 ++-
 .../spark/src/test/java/org/apache/iceberg/spark/sql/TestSelect.java   | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestSelect.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestSelect.java
index 35b3b7dd27..497a224200 100644
--- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestSelect.java
+++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestSelect.java
@@ -513,7 +513,8 @@ public class TestSelect extends CatalogTestBase {
             .read()
             .format("iceberg")
             .option(SparkReadOptions.TIMESTAMP_AS_OF, formattedDate)
-            .load(tableName);
+            .load(tableName)
+            .orderBy("id");
     List<Object[]> fromDF = rowsToJava(df.collectAsList());
     assertEquals("Snapshot at timestamp " + timestamp, expected, fromDF);
   }
diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestSelect.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestSelect.java
index eee5a29241..21d5afcf1d 100644
--- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestSelect.java
+++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestSelect.java
@@ -513,7 +513,8 @@ public class TestSelect extends CatalogTestBase {
             .read()
             .format("iceberg")
             .option(SparkReadOptions.TIMESTAMP_AS_OF, formattedDate)
-            .load(tableName);
+            .load(tableName)
+            .orderBy("id");
     List<Object[]> fromDF = rowsToJava(df.collectAsList());
     assertEquals("Snapshot at timestamp " + timestamp, expected, fromDF);
   }

Reply via email to