This is an automated email from the ASF dual-hosted git repository.
russellspitzer pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/iceberg.git
The following commit(s) were added to refs/heads/master by this push:
new b5f367bde Static Analysis: Fix Objects.requireNonNull usages (#4611)
b5f367bde is described below
commit b5f367bdedc26e0f676d88462912b57ff7bae167
Author: Piotr Findeisen <[email protected]>
AuthorDate: Fri Apr 22 22:29:25 2022 +0200
Static Analysis: Fix Objects.requireNonNull usages (#4611)
Per project code style, Preconditions.checkNotNull should be used instead.
---
nessie/src/main/java/org/apache/iceberg/nessie/NessieCatalog.java | 7 +++----
.../main/java/org/apache/iceberg/nessie/UpdateableReference.java | 3 +--
.../java/org/apache/iceberg/spark/actions/TestCreateActions.java | 4 ++--
3 files changed, 6 insertions(+), 8 deletions(-)
diff --git a/nessie/src/main/java/org/apache/iceberg/nessie/NessieCatalog.java b/nessie/src/main/java/org/apache/iceberg/nessie/NessieCatalog.java
index a36a9f95f..4360896b9 100644
--- a/nessie/src/main/java/org/apache/iceberg/nessie/NessieCatalog.java
+++ b/nessie/src/main/java/org/apache/iceberg/nessie/NessieCatalog.java
@@ -21,7 +21,6 @@ package org.apache.iceberg.nessie;
import java.util.List;
import java.util.Map;
-import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import org.apache.hadoop.conf.Configurable;
@@ -103,9 +102,9 @@ public class NessieCatalog extends BaseMetastoreCatalog implements AutoCloseable
@SuppressWarnings("checkstyle:HiddenField")
   public void initialize(String name, NessieIcebergClient client, FileIO fileIO, Map<String, String> catalogOptions) {
this.name = name == null ? "nessie" : name;
-    this.client = Objects.requireNonNull(client, "client must be non-null");
-    this.fileIO = Objects.requireNonNull(fileIO, "fileIO must be non-null");
-    this.catalogOptions = Objects.requireNonNull(catalogOptions, "catalogOptions must be non-null");
+    this.client = Preconditions.checkNotNull(client, "client must be non-null");
+    this.fileIO = Preconditions.checkNotNull(fileIO, "fileIO must be non-null");
+    this.catalogOptions = Preconditions.checkNotNull(catalogOptions, "catalogOptions must be non-null");
this.warehouseLocation = validateWarehouseLocation(name, catalogOptions);
}
diff --git a/nessie/src/main/java/org/apache/iceberg/nessie/UpdateableReference.java b/nessie/src/main/java/org/apache/iceberg/nessie/UpdateableReference.java
index d1b3e6c3a..20f43433a 100644
--- a/nessie/src/main/java/org/apache/iceberg/nessie/UpdateableReference.java
+++ b/nessie/src/main/java/org/apache/iceberg/nessie/UpdateableReference.java
@@ -19,7 +19,6 @@
package org.apache.iceberg.nessie;
-import java.util.Objects;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.projectnessie.client.api.NessieApiV1;
import org.projectnessie.error.NessieNotFoundException;
@@ -51,7 +50,7 @@ class UpdateableReference {
public void updateReference(Reference ref) {
Preconditions.checkState(mutable, "Hash references cannot be updated.");
- this.reference = Objects.requireNonNull(ref);
+ this.reference = Preconditions.checkNotNull(ref, "ref is null");
}
public boolean isBranch() {
diff --git a/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/actions/TestCreateActions.java b/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/actions/TestCreateActions.java
index d6530c1d6..c5b1bf31b 100644
--- a/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/actions/TestCreateActions.java
+++ b/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/actions/TestCreateActions.java
@@ -28,7 +28,6 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
-import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.TrueFileFilter;
@@ -37,6 +36,7 @@ import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.actions.MigrateTable;
import org.apache.iceberg.actions.SnapshotTable;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
@@ -671,7 +671,7 @@ public class TestCreateActions extends SparkCatalogTestBase
{
JavaSparkContext.fromSparkContext(spark.sparkContext()).parallelize(testData))
.coalesce(1).write().format("parquet").mode(SaveMode.Append).save(location.getPath());
-    File parquetFile = Arrays.stream(Objects.requireNonNull(location.listFiles(new FilenameFilter() {
+    File parquetFile = Arrays.stream(Preconditions.checkNotNull(location.listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.endsWith("parquet");