nastra commented on code in PR #7899:
URL: https://github.com/apache/iceberg/pull/7899#discussion_r1241084713


##########
spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestWriteAborts.java:
##########
@@ -105,23 +105,22 @@ public void testBatchAppend() throws Exception {
             new SimpleRecord(4, "b"));
     Dataset<Row> inputDF = spark.createDataFrame(records, SimpleRecord.class);
 
-    AssertHelpers.assertThrows(
-        "Write must fail",
-        SparkException.class,
-        "Encountered records that belong to already closed files",
-        () -> {
-          try {
-            // incoming records are not ordered by partitions so the job must fail
-            inputDF
-                .coalesce(1)
-                .sortWithinPartitions("id")
-                .writeTo(tableName)
-                .option(SparkWriteOptions.USE_TABLE_DISTRIBUTION_AND_ORDERING, "false")
-                .append();
-          } catch (NoSuchTableException e) {
-            throw new RuntimeException(e);
-          }
-        });
+    Assertions.assertThatThrownBy(

Review Comment:
   ```suggestion
       Assertions.assertThatThrownBy(
               () ->
                   // incoming records are not ordered by partitions so the job must fail
                   inputDF
                       .coalesce(1)
                       .sortWithinPartitions("id")
                       .writeTo(tableName)
                       .option(SparkWriteOptions.USE_TABLE_DISTRIBUTION_AND_ORDERING, "false")
                       .append())
           .isInstanceOf(SparkException.class)
           .hasMessageContaining("Encountered records that belong to already closed files");
   ```
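
   (For context on why no wrapping is needed: `assertThatThrownBy` takes AssertJ's `ThrowableAssert.ThrowingCallable`, whose `call()` method is declared `throws Throwable`, so checked exceptions such as `NoSuchTableException` may escape the lambda directly. A minimal standalone sketch; `mightThrowChecked` is a hypothetical stand-in for the Spark write call, not part of this PR:)

   ```java
   import org.assertj.core.api.Assertions;

   public class ThrowingCallableDemo {

     // Hypothetical stand-in for DataFrameWriterV2#append(), which declares the
     // checked NoSuchTableException; here we simply declare `throws Exception`.
     static void mightThrowChecked() throws Exception {
       throw new IllegalStateException("Encountered records that belong to already closed files");
     }

     public static void main(String[] args) {
       // No try-catch required: ThrowingCallable.call() is itself declared
       // `throws Throwable`, so the checked signature above compiles as-is.
       Assertions.assertThatThrownBy(ThrowingCallableDemo::mightThrowChecked)
           .isInstanceOf(IllegalStateException.class)
           .hasMessageContaining("already closed files");
     }
   }
   ```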



##########
spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestConflictValidation.java:
##########
@@ -69,21 +69,22 @@ public void testOverwriteFilterSerializableIsolation() throws Exception {
 
     // Validating from previous snapshot finds conflicts
     Dataset<Row> conflictingDf = spark.createDataFrame(records, SimpleRecord.class);
-    AssertHelpers.assertThrows(
-        "Conflicting new data files should throw exception",
-        ValidationException.class,
-        "Found conflicting files that can contain records matching ref(name=\"id\") == 1:",
-        () -> {
-          try {
-            conflictingDf
-                .writeTo(tableName)
-                .option(SparkWriteOptions.VALIDATE_FROM_SNAPSHOT_ID, String.valueOf(snapshotId))
-                .option(SparkWriteOptions.ISOLATION_LEVEL, IsolationLevel.SERIALIZABLE.toString())
-                .overwrite(functions.col("id").equalTo(1));
-          } catch (NoSuchTableException e) {
-            throw new RuntimeException(e);
-          }
-        });
+    Assertions.assertThatThrownBy(
+            () -> {
+              try {

Review Comment:
   the try-catch can be removed here: `assertThatThrownBy` accepts a `ThrowingCallable` whose `call()` is declared `throws Throwable`, so the checked `NoSuchTableException` no longer needs to be wrapped in a `RuntimeException`
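
   A sketch of what this block could look like once the try-catch is gone, rebuilt from the removed `AssertHelpers` lines above (exact formatting may differ from the final PR):

   ```java
       Assertions.assertThatThrownBy(
               () ->
                   conflictingDf
                       .writeTo(tableName)
                       .option(SparkWriteOptions.VALIDATE_FROM_SNAPSHOT_ID, String.valueOf(snapshotId))
                       .option(
                           SparkWriteOptions.ISOLATION_LEVEL, IsolationLevel.SERIALIZABLE.toString())
                       .overwrite(functions.col("id").equalTo(1)))
           .isInstanceOf(ValidationException.class)
           .hasMessageContaining(
               "Found conflicting files that can contain records matching ref(name=\"id\") == 1:");
   ```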



##########
spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestConflictValidation.java:
##########
@@ -409,20 +414,21 @@ public void testOverwritePartitionNoSnapshotIdValidation() throws Exception {
 
     // Validating from null snapshot is equivalent to validating from beginning
     Dataset<Row> conflictingDf = spark.createDataFrame(records, SimpleRecord.class);
-    AssertHelpers.assertThrows(
-        "Conflicting deleted data files should throw exception",
-        ValidationException.class,
-        "Found conflicting files that can contain records matching partitions [id=1]",
-        () -> {
-          try {
-            conflictingDf
-                .writeTo(tableName)
-                .option(SparkWriteOptions.ISOLATION_LEVEL, IsolationLevel.SERIALIZABLE.toString())
-                .overwritePartitions();
-          } catch (NoSuchTableException e) {
-            throw new RuntimeException(e);
-          }
-        });
+    Assertions.assertThatThrownBy(
+            () -> {
+              try {

Review Comment:
   we can remove all try-catch inside `assertThatThrownBy` blocks
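
   For instance, this occurrence could collapse to (a sketch with the same caveats as above):

   ```java
       Assertions.assertThatThrownBy(
               () ->
                   conflictingDf
                       .writeTo(tableName)
                       .option(
                           SparkWriteOptions.ISOLATION_LEVEL, IsolationLevel.SERIALIZABLE.toString())
                       .overwritePartitions())
           .isInstanceOf(ValidationException.class)
           .hasMessageContaining(
               "Found conflicting files that can contain records matching partitions [id=1]");
   ```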



##########
spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestConflictValidation.java:
##########
@@ -111,21 +112,21 @@ public void testOverwriteFilterSerializableIsolation2() throws Exception {
     // Validating from previous snapshot finds conflicts
     List<SimpleRecord> conflictingRecords = Lists.newArrayList(new SimpleRecord(1, "a"));
     Dataset<Row> conflictingDf = spark.createDataFrame(conflictingRecords, SimpleRecord.class);
-    AssertHelpers.assertThrows(
-        "Conflicting new delete files should throw exception",
-        ValidationException.class,
-        "Found new conflicting delete files that can apply to records matching ref(name=\"id\") == 1:",
-        () -> {
-          try {
-            conflictingDf
-                .writeTo(tableName)
-                .option(SparkWriteOptions.VALIDATE_FROM_SNAPSHOT_ID, String.valueOf(snapshotId))
-                .option(SparkWriteOptions.ISOLATION_LEVEL, IsolationLevel.SNAPSHOT.toString())
-                .overwrite(functions.col("id").equalTo(1));
-          } catch (NoSuchTableException e) {
-            throw new RuntimeException(e);
-          }
-        });
+    Assertions.assertThatThrownBy(
+            () -> {
+              try {

Review Comment:
   same as above


