This is an automated email from the ASF dual-hosted git repository.
zhangduo pushed a commit to branch branch-2.4
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-2.4 by this push:
new 82f966474ef HBASE-27373 Fix new spotbugs warnings after upgrading spotbugs to 4.7.2 (#4787) (#4791)
82f966474ef is described below
commit 82f966474ef3ce9488804aea69199f0f10001806
Author: Duo Zhang <[email protected]>
AuthorDate: Mon Sep 19 12:02:00 2022 +0800
HBASE-27373 Fix new spotbugs warnings after upgrading spotbugs to 4.7.2 (#4787) (#4791)
Signed-off-by: Wellington Chevreuil <[email protected]>
---
.../hbase/exceptions/ClientExceptionsUtil.java | 5 ++
.../apache/hadoop/hbase/protobuf/ProtobufUtil.java | 8 +--
.../hbase/shaded/protobuf/RequestConverter.java | 63 ++++------------------
.../hbase/io/crypto/CryptoCipherProvider.java | 2 +
.../hbase/io/crypto/DefaultCipherProvider.java | 2 +
.../apache/hadoop/hbase/security/Superusers.java | 26 +++++----
.../hadoop/hbase/trace/SpanReceiverHost.java | 2 +
hbase-metrics/pom.xml | 6 +++
.../hbase/metrics/impl/FastLongHistogram.java | 6 +--
hbase-protocol/pom.xml | 6 +++
.../google/protobuf/HBaseZeroCopyByteString.java | 2 +
.../org/apache/hadoop/hbase/rest/RESTServlet.java | 6 ++-
.../java/org/apache/hadoop/hbase/io/FileLink.java | 44 ++++-----------
.../hadoop/hbase/io/hfile/FixedFileTrailer.java | 2 +-
.../apache/hadoop/hbase/io/hfile/HFileBlock.java | 2 +-
.../hadoop/hbase/io/hfile/HFilePreadReader.java | 4 --
.../apache/hadoop/hbase/io/hfile/HFileUtil.java | 42 ---------------
.../hbase/namequeues/NamedQueueRecorder.java | 4 +-
.../hadoop/hbase/quotas/NoOpRegionSizeStore.java | 2 +
.../MissingSnapshotViolationPolicyEnforcement.java | 2 +
.../hadoop/hbase/regionserver/ChunkCreator.java | 5 +-
.../hadoop/hbase/regionserver/HMobStore.java | 21 ++------
.../hbase/regionserver/NoLimitScannerContext.java | 6 ++-
.../compactions/StripeCompactionPolicy.java | 7 ++-
.../hbase/regionserver/wal/ProtobufLogWriter.java | 11 ++--
.../replication/regionserver/WALEntryStream.java | 4 ++
.../hbase/security/token/FsDelegationToken.java | 7 +--
.../security/visibility/VisibilityLabelsCache.java | 8 ++-
.../apache/hadoop/hbase/util/BloomFilterUtil.java | 2 +
.../hadoop/hbase/util/FSTableDescriptors.java | 2 +
.../org/apache/hadoop/hbase/util/HBaseFsck.java | 5 +-
.../hadoop/hbase/wal/AbstractFSWALProvider.java | 5 +-
.../org/apache/hadoop/hbase/MiniHBaseCluster.java | 2 -
33 files changed, 118 insertions(+), 203 deletions(-)
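Most of what follows is one recurring fix: spotbugs 4.7.2 adds the MS_EXPOSE_REP detector, which flags public static methods that hand out a shared static object. Where the exposure is deliberate (singleton accessors, immutable sets, test-only hooks), the patch suppresses the warning with a justification instead of restructuring the code. A minimal sketch of the pattern, with an illustrative class name that is not part of the patch:

public final class ExampleSingleton {
  private static final ExampleSingleton INSTANCE = new ExampleSingleton();

  private ExampleSingleton() {
  }

  // MS_EXPOSE_REP fires because a static instance escapes through a public
  // static method; for an intentional singleton the patch documents why that
  // is fine rather than changing the design.
  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
      justification = "singleton pattern")
  public static ExampleSingleton getInstance() {
    return INSTANCE;
  }
}

The findbugs-annotations dependency added to hbase-metrics and hbase-protocol below exists so those modules can compile this annotation.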
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java
index 93b970e3387..33b5d9cb57f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.exceptions;
+import com.google.errorprone.annotations.RestrictedApi;
import java.io.EOFException;
import java.io.IOException;
import java.io.SyncFailedException;
@@ -140,6 +141,10 @@ public final class ClientExceptionsUtil {
* For test only. Usually you should use the {@link #isConnectionException(Throwable)} method
* below.
*/
+ @RestrictedApi(explanation = "Should only be called in tests", link = "",
+   allowedOnPath = ".*/src/test/.*")
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "test only")
public static Set<Class<? extends Throwable>> getConnectionExceptionTypes() {
return CONNECTION_EXCEPTION_TYPES;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 03e408aed02..28afef07e55 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -1473,18 +1473,12 @@ public final class ProtobufUtil {
}
}
- /**
- * @see #buildGetServerInfoRequest()
- */
- private static GetServerInfoRequest GET_SERVER_INFO_REQUEST =
- GetServerInfoRequest.newBuilder().build();
-
/**
* Create a new GetServerInfoRequest
* @return a GetServerInfoRequest
*/
public static GetServerInfoRequest buildGetServerInfoRequest() {
- return GET_SERVER_INFO_REQUEST;
+ return GetServerInfoRequest.getDefaultInstance();
}
public static ScanMetrics toScanMetrics(final byte[] bytes) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
index b27f61bf6aa..89d8acceae6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
@@ -1032,32 +1032,20 @@ public final class RequestConverter {
return builder.build();
}
- /**
- * @see #buildRollWALWriterRequest()
- */
- private static RollWALWriterRequest ROLL_WAL_WRITER_REQUEST =
- RollWALWriterRequest.newBuilder().build();
-
/**
* Create a new RollWALWriterRequest
* @return a ReplicateWALEntryRequest
*/
public static RollWALWriterRequest buildRollWALWriterRequest() {
- return ROLL_WAL_WRITER_REQUEST;
+ return RollWALWriterRequest.getDefaultInstance();
}
- /**
- * @see #buildGetServerInfoRequest()
- */
- private static GetServerInfoRequest GET_SERVER_INFO_REQUEST =
- GetServerInfoRequest.newBuilder().build();
-
/**
* Create a new GetServerInfoRequest
* @return a GetServerInfoRequest
*/
public static GetServerInfoRequest buildGetServerInfoRequest() {
- return GET_SERVER_INFO_REQUEST;
+ return GetServerInfoRequest.getDefaultInstance();
}
/**
@@ -1438,18 +1426,12 @@ public final class RequestConverter {
.addAllOptions(ClusterMetricsBuilder.toOptions(options)).build();
}
- /**
- * @see #buildCatalogScanRequest
- */
- private static final RunCatalogScanRequest CATALOG_SCAN_REQUEST =
- RunCatalogScanRequest.newBuilder().build();
-
/**
* Creates a request for running a catalog scan
* @return A {@link RunCatalogScanRequest}
*/
public static RunCatalogScanRequest buildCatalogScanRequest() {
- return CATALOG_SCAN_REQUEST;
+ return RunCatalogScanRequest.getDefaultInstance();
}
/**
@@ -1460,32 +1442,20 @@ public final class RequestConverter {
return EnableCatalogJanitorRequest.newBuilder().setEnable(enable).build();
}
- /**
- * @see #buildIsCatalogJanitorEnabledRequest()
- */
- private static final IsCatalogJanitorEnabledRequest IS_CATALOG_JANITOR_ENABLED_REQUEST =
- IsCatalogJanitorEnabledRequest.newBuilder().build();
-
/**
* Creates a request for querying the master whether the catalog janitor is enabled
* @return A {@link IsCatalogJanitorEnabledRequest}
*/
public static IsCatalogJanitorEnabledRequest buildIsCatalogJanitorEnabledRequest() {
- return IS_CATALOG_JANITOR_ENABLED_REQUEST;
+ return IsCatalogJanitorEnabledRequest.getDefaultInstance();
}
- /**
- * @see #buildRunCleanerChoreRequest()
- */
- private static final RunCleanerChoreRequest CLEANER_CHORE_REQUEST =
- RunCleanerChoreRequest.newBuilder().build();
-
/**
* Creates a request for running cleaner chore
* @return A {@link RunCleanerChoreRequest}
*/
public static RunCleanerChoreRequest buildRunCleanerChoreRequest() {
- return CLEANER_CHORE_REQUEST;
+ return RunCleanerChoreRequest.getDefaultInstance();
}
/**
@@ -1496,18 +1466,12 @@ public final class RequestConverter {
return SetCleanerChoreRunningRequest.newBuilder().setOn(on).build();
}
- /**
- * @see #buildIsCleanerChoreEnabledRequest()
- */
- private static final IsCleanerChoreEnabledRequest IS_CLEANER_CHORE_ENABLED_REQUEST =
- IsCleanerChoreEnabledRequest.newBuilder().build();
-
/**
* Creates a request for querying the master whether the cleaner chore is enabled
* @return A {@link IsCleanerChoreEnabledRequest}
*/
public static IsCleanerChoreEnabledRequest buildIsCleanerChoreEnabledRequest() {
- return IS_CLEANER_CHORE_ENABLED_REQUEST;
+ return IsCleanerChoreEnabledRequest.getDefaultInstance();
}
/**
@@ -1727,34 +1691,25 @@ public final class RequestConverter {
return builder.build();
}
- private static final GetSpaceQuotaRegionSizesRequest GET_SPACE_QUOTA_REGION_SIZES_REQUEST =
- GetSpaceQuotaRegionSizesRequest.newBuilder().build();
-
/**
* Returns a {@link GetSpaceQuotaRegionSizesRequest} object.
*/
public static GetSpaceQuotaRegionSizesRequest buildGetSpaceQuotaRegionSizesRequest() {
- return GET_SPACE_QUOTA_REGION_SIZES_REQUEST;
+ return GetSpaceQuotaRegionSizesRequest.getDefaultInstance();
}
- private static final GetSpaceQuotaSnapshotsRequest GET_SPACE_QUOTA_SNAPSHOTS_REQUEST =
- GetSpaceQuotaSnapshotsRequest.newBuilder().build();
-
/**
* Returns a {@link GetSpaceQuotaSnapshotsRequest} object.
*/
public static GetSpaceQuotaSnapshotsRequest buildGetSpaceQuotaSnapshotsRequest() {
- return GET_SPACE_QUOTA_SNAPSHOTS_REQUEST;
+ return GetSpaceQuotaSnapshotsRequest.getDefaultInstance();
}
- private static final GetQuotaStatesRequest GET_QUOTA_STATES_REQUEST =
- GetQuotaStatesRequest.newBuilder().build();
-
/**
* Returns a {@link GetQuotaStatesRequest} object.
*/
public static GetQuotaStatesRequest buildGetQuotaStatesRequest() {
- return GET_QUOTA_STATES_REQUEST;
+ return GetQuotaStatesRequest.getDefaultInstance();
}
public static DecommissionRegionServersRequest
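The deleted XXX_REQUEST constants above were caching what protobuf already caches: every generated message type keeps a canonical immutable instance behind getDefaultInstance(), so a parameterless request needs no hand-rolled static field for spotbugs to flag. A hedged sketch using the stock com.google.protobuf.Empty well-known type (any generated message behaves the same way):

import com.google.protobuf.Empty;

public class DefaultInstanceDemo {
  public static void main(String[] args) {
    // getDefaultInstance() returns the runtime's cached immutable instance;
    // an untouched builder produces a value-equal message.
    Empty canonical = Empty.getDefaultInstance();
    Empty built = Empty.newBuilder().build();
    System.out.println(canonical.equals(built)); // true
  }
}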
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java
index 8eb4bdea71b..7f5c58883f2 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java
@@ -30,6 +30,8 @@ public final class CryptoCipherProvider implements CipherProvider {
private static CryptoCipherProvider instance;
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "singleton pattern")
public static CryptoCipherProvider getInstance() {
if (instance != null) {
return instance;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java
index 33e19575d94..a4c3f3b7ca8 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java
@@ -30,6 +30,8 @@ public final class DefaultCipherProvider implements CipherProvider {
private static DefaultCipherProvider instance;
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "singleton pattern")
public static DefaultCipherProvider getInstance() {
if (instance != null) {
return instance;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
index 11703603587..44b90bbd03f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
@@ -19,14 +19,14 @@ package org.apache.hadoop.hbase.security;
import java.io.IOException;
import java.util.Collection;
-import java.util.HashSet;
-import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
+
/**
* Keeps lists of superusers and super groups loaded from HBase configuration, checks if certain
* user is regarded as superuser.
@@ -38,8 +38,8 @@ public final class Superusers {
/** Configuration key for superusers */
public static final String SUPERUSER_CONF_KEY = "hbase.superuser"; // Not getting a name
- private static Set<String> superUsers;
- private static Set<String> superGroups;
+ private static ImmutableSet<String> superUsers;
+ private static ImmutableSet<String> superGroups;
private static User systemUser;
private Superusers() {
@@ -53,8 +53,8 @@ public final class Superusers {
* @throws IllegalStateException if current user is null
*/
public static void initialize(Configuration conf) throws IOException {
- superUsers = new HashSet<>();
- superGroups = new HashSet<>();
+ ImmutableSet.Builder<String> superUsersBuilder = ImmutableSet.builder();
+ ImmutableSet.Builder<String> superGroupsBuilder = ImmutableSet.builder();
systemUser = User.getCurrent();
if (systemUser == null) {
@@ -64,17 +64,19 @@ public final class Superusers {
String currentUser = systemUser.getShortName();
LOG.trace("Current user name is {}", currentUser);
- superUsers.add(currentUser);
+ superUsersBuilder.add(currentUser);
String[] superUserList = conf.getStrings(SUPERUSER_CONF_KEY, new String[0]);
for (String name : superUserList) {
if (AuthUtil.isGroupPrincipal(name)) {
// Let's keep the '@' for distinguishing from user.
- superGroups.add(name);
+ superGroupsBuilder.add(name);
} else {
- superUsers.add(name);
+ superUsersBuilder.add(name);
}
}
+ superUsers = superUsersBuilder.build();
+ superGroups = superGroupsBuilder.build();
}
/**
@@ -111,14 +113,20 @@ public final class Superusers {
return superUsers.contains(user) || superGroups.contains(user);
}
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "immutable")
public static Collection<String> getSuperUsers() {
return superUsers;
}
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "immutable")
public static Collection<String> getSuperGroups() {
return superGroups;
}
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "by design")
public static User getSystemUser() {
return systemUser;
}
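Superusers now materializes Guava ImmutableSets, so the getters above genuinely cannot leak mutable state; the remaining suppression only records that returning the set is intentional. A small sketch of the builder idiom, written against plain Guava rather than the hbase-thirdparty relocated package the class actually imports:

import com.google.common.collect.ImmutableSet;

public class ImmutableSetDemo {
  public static void main(String[] args) {
    ImmutableSet.Builder<String> builder = ImmutableSet.builder();
    builder.add("admin");
    builder.add("@supergroup"); // leading '@' marks a group principal, as in AuthUtil
    ImmutableSet<String> superUsers = builder.build();
    try {
      superUsers.add("intruder"); // immutable set: mutation always fails
    } catch (UnsupportedOperationException expected) {
      System.out.println("safe to expose: " + superUsers);
    }
  }
}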
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
index dc468d5f0c5..a0e12ff5cfd 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
@@ -59,6 +59,8 @@ public class SpanReceiverHost {
}
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "by design")
public static Configuration getConfiguration() {
synchronized (SingletonHolder.INSTANCE.lock) {
if (SingletonHolder.INSTANCE.host == null || SingletonHolder.INSTANCE.host.conf == null) {
diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml
index 01dfaf92e73..fc11058c986 100644
--- a/hbase-metrics/pom.xml
+++ b/hbase-metrics/pom.xml
@@ -76,6 +76,12 @@
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
</dependency>
+ <dependency>
+ <groupId>com.github.stephenc.findbugs</groupId>
+ <artifactId>findbugs-annotations</artifactId>
+ <scope>compile</scope>
+ <optional>true</optional>
+ </dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
diff --git a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
index b1b47e3904e..768435d1eb1 100644
--- a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
+++ b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
@@ -138,6 +138,8 @@ public class FastLongHistogram {
/**
* Computes the quantiles give the ratios.
*/
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "FL_FLOATS_AS_LOOP_COUNTERS",
+   justification = "valid usage")
public long[] getQuantiles(double[] quantiles) {
if (!hasData) {
// No data yet.
@@ -266,10 +268,6 @@ public class FastLongHistogram {
this.bins = new Bins(bins, numOfBins, 0.01, 0.999);
}
- private FastLongHistogram(Bins bins) {
- this.bins = bins;
- }
-
/**
* Adds a value to the histogram.
*/
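FL_FLOATS_AS_LOOP_COUNTERS exists because accumulating a floating-point loop counter can drift past an exact termination value; FastLongHistogram's quantile walk is bounded, so the warning is suppressed as valid usage. An illustrative snippet of the failure mode the detector targets (not code from the patch):

public class FloatLoopDemo {
  public static void main(String[] args) {
    // 0.1 has no exact binary representation, so the counter never equals
    // 1.0 exactly; an equality-based exit condition would spin forever.
    int iterations = 0;
    for (double d = 0.0; d != 1.0 && iterations < 20; d += 0.1) {
      iterations++;
    }
    System.out.println("stopped after " + iterations + " iterations, not 10");
  }
}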
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index c5175ceeb93..98b65b5a0c9 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -42,6 +42,12 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
+ <dependency>
+ <groupId>com.github.stephenc.findbugs</groupId>
+ <artifactId>findbugs-annotations</artifactId>
+ <scope>compile</scope>
+ <optional>true</optional>
+ </dependency>
</dependencies>
<build>
<plugins>
diff --git a/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java b/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java
index 0720656aac2..69f4a00f5ec 100644
--- a/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java
+++ b/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java
@@ -65,6 +65,8 @@ public final class HBaseZeroCopyByteString extends LiteralByteString {
* of a {@code LiteralByteString}.
* @return byte[] representation
*/
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "by design")
public static byte[] zeroCopyGetBytes(final ByteString buf) {
if (buf instanceof LiteralByteString) {
return ((LiteralByteString) buf).bytes;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
index 66834f36176..79760aead9d 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -53,6 +53,8 @@ public class RESTServlet implements Constants {
}
/** Returns the RESTServlet singleton instance */
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "singleton pattern")
public synchronized static RESTServlet getInstance() {
assert (INSTANCE != null);
return INSTANCE;
@@ -66,8 +68,10 @@ public class RESTServlet implements Constants {
/**
* @param conf Existing configuration to use in rest servlet
* @param userProvider the login user provider
- * @return the RESTServlet singleton instance n
+ * @return the RESTServlet singleton instance
*/
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "singleton pattern")
public synchronized static RESTServlet getInstance(Configuration conf, UserProvider userProvider)
throws IOException {
if (INSTANCE == null) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
index 9dd4394489d..4f140b0774d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
@@ -113,12 +113,10 @@ public class FileLink {
res = in.read();
} catch (FileNotFoundException e) {
res = tryOpen().read();
- } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
- res = tryOpen().read();
- } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
- res = tryOpen().read();
}
- if (res > 0) pos += 1;
+ if (res > 0) {
+ pos += 1;
+ }
return res;
}
@@ -134,12 +132,10 @@ public class FileLink {
n = in.read(b, off, len);
} catch (FileNotFoundException e) {
n = tryOpen().read(b, off, len);
- } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
- n = tryOpen().read(b, off, len);
- } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
- n = tryOpen().read(b, off, len);
}
- if (n > 0) pos += n;
+ if (n > 0) {
+ pos += n;
+ }
assert (in.getPos() == pos);
return n;
}
@@ -151,10 +147,6 @@ public class FileLink {
n = in.read(position, buffer, offset, length);
} catch (FileNotFoundException e) {
n = tryOpen().read(position, buffer, offset, length);
- } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
- n = tryOpen().read(position, buffer, offset, length);
- } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
- n = tryOpen().read(position, buffer, offset, length);
}
return n;
}
@@ -170,10 +162,6 @@ public class FileLink {
in.readFully(position, buffer, offset, length);
} catch (FileNotFoundException e) {
tryOpen().readFully(position, buffer, offset, length);
- } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
- tryOpen().readFully(position, buffer, offset, length);
- } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
- tryOpen().readFully(position, buffer, offset, length);
}
}
@@ -185,13 +173,11 @@ public class FileLink {
skipped = in.skip(n);
} catch (FileNotFoundException e) {
skipped = tryOpen().skip(n);
- } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
- skipped = tryOpen().skip(n);
- } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
- skipped = tryOpen().skip(n);
}
- if (skipped > 0) pos += skipped;
+ if (skipped > 0) {
+ pos += skipped;
+ }
return skipped;
}
@@ -201,10 +187,6 @@ public class FileLink {
return in.available();
} catch (FileNotFoundException e) {
return tryOpen().available();
- } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
- return tryOpen().available();
- } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
- return tryOpen().available();
}
}
@@ -214,10 +196,6 @@ public class FileLink {
in.seek(pos);
} catch (FileNotFoundException e) {
tryOpen().seek(pos);
- } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
- tryOpen().seek(pos);
- } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
- tryOpen().seek(pos);
}
this.pos = pos;
}
@@ -234,10 +212,6 @@ public class FileLink {
res = in.seekToNewSource(targetPos);
} catch (FileNotFoundException e) {
res = tryOpen().seekToNewSource(targetPos);
- } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
- res = tryOpen().seekToNewSource(targetPos);
- } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
- res = tryOpen().seekToNewSource(targetPos);
}
if (res) pos = targetPos;
return res;
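The catch blocks deleted from FileLink were defenses against HDFS 1.x, where DFSInputStream.getBlockAt() could throw NullPointerException or trip an assertion after a file moved; on the Hadoop versions branch-2.4 supports, only FileNotFoundException warrants the reopen-and-retry. A reduced sketch of the shape that remains, with tryOpen() as a stand-in for FileLink's real relocation logic:

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;

public abstract class RetryOnMoveDemo {
  protected InputStream in;

  // Hypothetical stand-in for FileLinkInputStream#tryOpen(): reopen the
  // stream at the link's next candidate location.
  protected abstract InputStream tryOpen() throws IOException;

  public int read() throws IOException {
    int res;
    try {
      res = in.read();
    } catch (FileNotFoundException e) {
      // The file was archived or moved; retry once on the reopened stream.
      res = tryOpen().read();
    }
    return res;
  }
}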
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
index 15ba800ea3f..02171f9bf6c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
@@ -388,7 +388,7 @@ public class FixedFileTrailer {
bufferSize = (int) fileSize;
}
- HFileUtil.seekOnMultipleSources(istream, seekPoint);
+ istream.seek(seekPoint);
ByteBuffer buf = ByteBuffer.allocate(bufferSize);
istream.readFully(buf.array(), buf.arrayOffset(), buf.arrayOffset() + buf.limit());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 41e8526f22a..1d0bcced50b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -1427,7 +1427,7 @@ public class HFileBlock implements Cacheable {
boolean peekIntoNextBlock, long fileOffset, boolean pread) throws IOException {
if (!pread) {
// Seek + read. Better for scanning.
- HFileUtil.seekOnMultipleSources(istream, fileOffset);
+ istream.seek(fileOffset);
long realOffset = istream.getPos();
if (realOffset != fileOffset) {
throw new IOException("Tried to seek to " + fileOffset + " to read "
+ size
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePreadReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePreadReader.java
index 25627c34f51..98401c46bee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePreadReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePreadReader.java
@@ -73,10 +73,6 @@ public class HFilePreadReader extends HFileReaderImpl {
if (LOG.isTraceEnabled()) {
LOG.trace("Prefetch " + getPathOffsetEndStr(path, offset, end),
e);
}
- } catch (NullPointerException e) {
- LOG.warn(
- "Stream moved/closed or prefetch cancelled?" +
getPathOffsetEndStr(path, offset, end),
- e);
} catch (Exception e) {
// Other exceptions are interesting
LOG.warn("Prefetch " + getPathOffsetEndStr(path, offset, end), e);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileUtil.java
deleted file mode 100644
index 612f127e11e..00000000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileUtil.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.io.hfile;
-
-import java.io.IOException;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.yetus.audience.InterfaceAudience;
-
[email protected]
-class HFileUtil {
-
- /**
- * guards against NullPointer utility which tries to seek on the DFSIS and will try an alternative
- * source if the FSDataInputStream throws an NPE HBASE-17501 nnn
- */
- static public void seekOnMultipleSources(FSDataInputStream istream, long offset)
- throws IOException {
- try {
- // attempt to seek inside of current blockReader
- istream.seek(offset);
- } catch (NullPointerException e) {
- // retry the seek on an alternate copy of the data
- // this can occur if the blockReader on the DFSInputStream is null
- istream.seekToNewSource(offset);
- }
- }
-}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/namequeues/NamedQueueRecorder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/namequeues/NamedQueueRecorder.java
index 38f63fd09be..efe512b1a85 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/namequeues/NamedQueueRecorder.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/namequeues/NamedQueueRecorder.java
@@ -43,7 +43,7 @@ public class NamedQueueRecorder {
private final Disruptor<RingBufferEnvelope> disruptor;
private final LogEventHandler logEventHandler;
- private static NamedQueueRecorder namedQueueRecorder;
+ private static volatile NamedQueueRecorder namedQueueRecorder;
private static boolean isInit = false;
private static final Object LOCK = new Object();
@@ -71,6 +71,8 @@ public class NamedQueueRecorder {
this.disruptor.start();
}
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "singleton pattern")
public static NamedQueueRecorder getInstance(Configuration conf) {
if (namedQueueRecorder != null) {
return namedQueueRecorder;
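Making namedQueueRecorder volatile is what keeps the unsynchronized fast path above correct: under double-checked locking, a non-volatile read could observe a partially constructed object. A compact sketch of the idiom, assuming the same LOCK-guarded slow path the class uses:

public final class LazySingletonDemo {
  private static volatile LazySingletonDemo instance;
  private static final Object LOCK = new Object();

  private LazySingletonDemo() {
  }

  public static LazySingletonDemo getInstance() {
    LazySingletonDemo local = instance; // single volatile read on the fast path
    if (local != null) {
      return local;
    }
    synchronized (LOCK) {
      if (instance == null) {
        instance = new LazySingletonDemo(); // volatile write publishes safely
      }
      return instance;
    }
  }
}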
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoOpRegionSizeStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoOpRegionSizeStore.java
index dcc32d766b9..cb463b8729d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoOpRegionSizeStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoOpRegionSizeStore.java
@@ -32,6 +32,8 @@ public final class NoOpRegionSizeStore implements RegionSizeStore {
private NoOpRegionSizeStore() {
}
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "singleton pattern")
public static NoOpRegionSizeStore getInstance() {
return INSTANCE;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
index c747d0c8b3d..732318ac870 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
@@ -39,6 +39,8 @@ public final class MissingSnapshotViolationPolicyEnforcement
private MissingSnapshotViolationPolicyEnforcement() {
}
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "singleton pattern")
public static SpaceViolationPolicyEnforcement getInstance() {
return SINGLETON;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
index 7e1a980705b..1ea692ba5a5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
@@ -114,7 +114,8 @@ public class ChunkCreator {
* @param heapMemoryManager the heapmemory manager
* @return singleton MSLABChunkCreator
*/
- @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "LI_LAZY_INIT_STATIC",
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(
+ value = { "LI_LAZY_INIT_STATIC", "MS_EXPOSE_REP" },
justification = "Method is called by single thread at the starting of RS")
public static ChunkCreator initialize(int chunkSize, boolean offheap, long globalMemStoreSize,
float poolSizePercentage, float initialCountPercentage, HeapMemoryManager heapMemoryManager,
@@ -127,6 +128,8 @@ public class ChunkCreator {
return instance;
}
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "singleton pattern")
public static ChunkCreator getInstance() {
return instance;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
index 290ba85ca51..41737e6dd54 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
@@ -368,7 +368,7 @@ public class HMobStore extends HStore {
private MobCell readCell(List<Path> locations, String fileName, Cell search,
boolean cacheMobBlocks, long readPt, boolean readEmptyValueOnMobCellMiss) throws IOException {
FileSystem fs = getFileSystem();
- Throwable throwable = null;
+ IOException ioe = null;
for (Path location : locations) {
MobFile file = null;
Path path = new Path(location, fileName);
@@ -379,7 +379,7 @@ public class HMobStore extends HStore {
: file.readCell(search, cacheMobBlocks);
} catch (IOException e) {
mobFileCache.evictFile(fileName);
- throwable = e;
+ ioe = e;
if (
(e instanceof FileNotFoundException) || (e.getCause() instanceof FileNotFoundException)
) {
@@ -390,14 +390,6 @@ public class HMobStore extends HStore {
} else {
throw e;
}
- } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
- mobFileCache.evictFile(fileName);
- LOG.debug("Fail to read the cell", e);
- throwable = e;
- } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
- mobFileCache.evictFile(fileName);
- LOG.debug("Fail to read the cell", e);
- throwable = e;
} finally {
if (file != null) {
mobFileCache.closeFile(file);
@@ -409,18 +401,15 @@ public class HMobStore extends HStore {
if (readEmptyValueOnMobCellMiss) {
return null;
} else if (
- (throwable instanceof FileNotFoundException)
- || (throwable.getCause() instanceof FileNotFoundException)
+ (ioe instanceof FileNotFoundException) || (ioe.getCause() instanceof FileNotFoundException)
) {
// The region is re-opened when FileNotFoundException is thrown.
// This is not necessary when MOB files cannot be found, because the store files
// in a region only contain the references to MOB files and a re-open on a region
// doesn't help fix the lost MOB files.
- throw new DoNotRetryIOException(throwable);
- } else if (throwable instanceof IOException) {
- throw (IOException) throwable;
+ throw new DoNotRetryIOException(ioe);
} else {
- throw new IOException(throwable);
+ throw ioe;
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.java
index 5b92cc07b57..94c97374305 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.java
@@ -45,9 +45,11 @@ public class NoLimitScannerContext extends ScannerContext {
private static final ScannerContext NO_LIMIT = new NoLimitScannerContext();
/**
- * @return The static, immutable instance of {@link NoLimitScannerContext} to be used whenever
- * limits should not be enforced
+ * Returns the static, immutable instance of {@link NoLimitScannerContext} to be used whenever
+ * limits should not be enforced
*/
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "singleton pattern")
public static final ScannerContext getInstance() {
return NO_LIMIT;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
index adcd41f3e26..f5be2b38038 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
@@ -421,6 +421,8 @@ public class StripeCompactionPolicy extends CompactionPolicy {
return totalSize;
}
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "FL_FLOATS_AS_LOOP_COUNTERS",
+   justification = "valid usage")
private Pair<Long, Integer> estimateTargetKvs(Collection<HStoreFile> files,
double splitCount) {
// If the size is larger than what we target, we don't want to split into proportionally
// larger parts and then have to split again very soon. So, we will increase the multiplier
@@ -433,7 +435,10 @@ public class StripeCompactionPolicy extends CompactionPolicy {
while (ratio > 1.0) {
// Ratio of real to desired size if we increase the multiplier.
double newRatio = totalSize / ((splitCount + 1.0) * targetPartSize);
- if ((1.0 / newRatio) >= ratio) break; // New ratio is < 1.0, but further than the last one.
+ if ((1.0 / newRatio) >= ratio) {
+ // New ratio is < 1.0, but further than the last one.
+ break;
+ }
ratio = newRatio;
splitCount += 1.0;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
index bfa16c2bf7e..f4da7c459cf 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
@@ -63,15 +63,10 @@ public class ProtobufLogWriter extends AbstractProtobufLogWriter implements FSHL
@Override
public void close() throws IOException {
if (this.output != null) {
- try {
- if (!trailerWritten) {
- writeWALTrailer();
- }
- this.output.close();
- } catch (NullPointerException npe) {
- // Can get a NPE coming up from down in DFSClient$DFSOutputStream#close
- LOG.warn(npe.toString(), npe);
+ if (!trailerWritten) {
+ writeWALTrailer();
}
+ this.output.close();
this.output = null;
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
index 77c0eee67e9..aa059aa30a2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
@@ -320,6 +320,8 @@ class WALEntryStream implements Closeable {
}
}
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DCN_NULLPOINTER_EXCEPTION",
+   justification = "HDFS-4380")
private void openReader(Path path) throws IOException {
try {
// Detect if this is a new file, if so get a new reader else
@@ -370,6 +372,8 @@ class WALEntryStream implements Closeable {
}
}
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DCN_NULLPOINTER_EXCEPTION",
+   justification = "HDFS-4380")
private void resetReader() throws IOException {
try {
currentEntry = null;
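In contrast to FileLink, WALEntryStream keeps its NullPointerException catches because HDFS-4380 means reader internals can still NPE mid-recovery; the new DCN_NULLPOINTER_EXCEPTION detector is therefore suppressed rather than the catches removed. A sketch of that shape with hypothetical names:

public class KeepNpeCatchDemo {
  interface WalReader {
    long next() throws java.io.IOException;
  }

  private WalReader reader;

  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DCN_NULLPOINTER_EXCEPTION",
      justification = "HDFS-4380: stream internals may NPE during block recovery")
  long readOne() throws java.io.IOException {
    try {
      return reader.next();
    } catch (NullPointerException e) {
      // Deliberate: surface the known HDFS race as a checked exception.
      throw new java.io.IOException("reader hit HDFS-4380 race", e);
    }
  }
}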
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
index 1eb88a3d12f..51961a92370 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
@@ -98,12 +98,7 @@ public class FsDelegationToken {
userToken = userProvider.getCurrent().getToken(tokenKind, fs.getCanonicalServiceName());
if (userToken == null) {
hasForwardedToken = false;
- try {
- userToken = fs.getDelegationToken(renewer);
- } catch (NullPointerException npe) {
- // we need to handle NullPointerException in case HADOOP-10009 is missing
- LOG.error("Failed to get token for " + renewer);
- }
+ userToken = fs.getDelegationToken(renewer);
} else {
hasForwardedToken = true;
LOG.info("Use the existing token: " + userToken);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
index a3584168e43..2060223e69f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
@@ -74,9 +74,11 @@ public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
}
/**
- * Creates the singleton instance, if not yet present, and returns the same. nn * @return
- * Singleton instance of VisibilityLabelsCache n
+ * Creates the singleton instance, if not yet present, and returns the same.
+ * @return Singleton instance of VisibilityLabelsCache
*/
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "singleton pattern")
public synchronized static VisibilityLabelsCache createAndGet(ZKWatcher watcher,
Configuration conf) throws IOException {
// VisibilityLabelService#init() for different regions (in same RS) passes same instance of
@@ -95,6 +97,8 @@ public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
* @return Singleton instance of VisibilityLabelsCache n * when this is called before calling
* {@link #createAndGet(ZKWatcher, Configuration)}
*/
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+   justification = "singleton pattern")
public static VisibilityLabelsCache get() {
// By the time this method is called, the singleton instance of VisibilityLabelsCache should
// have been created.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
index b35e8258ddf..7b8a5cd241a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
@@ -76,6 +76,8 @@ public final class BloomFilterUtil {
* This gets used in {@link #contains(ByteBuff, int, int, Hash, int, HashKey)}
* @param random The random number source to use, or null to compute actual hashes
*/
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "EI_EXPOSE_STATIC_REP2",
+   justification = "ignore for now, improve TestCompoundBloomFilter later")
public static void setRandomGeneratorForTest(Random random) {
randomGeneratorForTest = random;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index b0187f99dab..d22cf12ae1c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -186,6 +186,8 @@ public class FSTableDescriptors implements TableDescriptors {
* {@link #fsvisited} is not {@code true}, i.e, we haven't done a full scan yet, to see if a newer
* file has been created since the cached one was read.
*/
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DCN_NULLPOINTER_EXCEPTION",
+   justification = "Fixed in newer minor releases, not a blocker, just keep it as is for now")
@Override
@Nullable
public TableDescriptor get(TableName tableName) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 0ff485dbaec..3121196ecbc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -985,12 +985,9 @@ public class HBaseFsck extends Configured implements Closeable {
start = CellUtil.cloneRow(startKv.get());
Optional<Cell> endKv = hf.getLastKey();
end = CellUtil.cloneRow(endKv.get());
- } catch (IOException ioe) {
+ } catch (Exception ioe) {
LOG.warn("Problem reading orphan file " + hfile + ", skipping");
continue;
- } catch (NullPointerException ioe) {
- LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile,
skipping");
- continue;
} finally {
if (hf != null) {
hf.close();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
index 2a6eef88d2a..7e5e33098c2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
@@ -483,8 +483,9 @@ public abstract class AbstractFSWALProvider<T extends AbstractFSWAL<?>> implemen
* @param conf configuration
* @return WAL Reader instance
*/
- public static org.apache.hadoop.hbase.wal.WAL.Reader openReader(Path path, Configuration conf)
- throws IOException {
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DCN_NULLPOINTER_EXCEPTION",
+   justification = "HDFS-4380")
+ public static WAL.Reader openReader(Path path, Configuration conf) throws IOException {
long retryInterval = 2000; // 2 sec
int maxAttempts = 30;
int attempt = 0;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
index 5a1eb98fc38..78a3dac8a9c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
@@ -214,8 +214,6 @@ public class MiniHBaseCluster extends HBaseCluster {
try {
LOG.info("Hook closing fs=" + this.fs);
this.fs.close();
- } catch (NullPointerException npe) {
- LOG.debug("Need to fix these: " + npe.toString());
} catch (IOException e) {
LOG.warn("Running hook", e);
}