Author: stack
Date: Tue May 20 20:12:41 2014
New Revision: 1596383

URL: http://svn.apache.org/r1596383
Log:
HBASE-11203 Clean up javadoc and findbugs warnings in trunk

Modified:
    
hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
    
hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
    
hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
    
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
    
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java
    
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java

Modified: 
hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
URL: 
http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java?rev=1596383&r1=1596382&r2=1596383&view=diff
==============================================================================
--- 
hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
 (original)
+++ 
hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
 Tue May 20 20:12:41 2014
@@ -450,7 +450,7 @@ public class AggregationClient {
       S sum = null;
       Long rowCount = 0l;
 
-      public Pair<S, Long> getAvgArgs() {
+      public synchronized Pair<S, Long> getAvgArgs() {
         return new Pair<S, Long>(sum, rowCount);
       }
 
@@ -547,7 +547,7 @@ public class AggregationClient {
       long rowCountVal = 0l;
       S sumVal = null, sumSqVal = null;
 
-      public Pair<List<S>, Long> getStdParams() {
+      public synchronized Pair<List<S>, Long> getStdParams() {
         List<S> l = new ArrayList<S>();
         l.add(sumVal);
         l.add(sumSqVal);
@@ -670,7 +670,7 @@ public class AggregationClient {
     class StdCallback implements Batch.Callback<List<S>> {
       S sumVal = null, sumWeights = null;
 
-      public Pair<NavigableMap<byte[], List<S>>, List<S>> getMedianParams() {
+      public synchronized Pair<NavigableMap<byte[], List<S>>, List<S>> getMedianParams() {
         List<S> l = new ArrayList<S>();
         l.add(sumVal);
         l.add(sumWeights);

Modified: 
hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
URL: 
http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java?rev=1596383&r1=1596382&r2=1596383&view=diff
==============================================================================
--- 
hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
 (original)
+++ 
hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
 Tue May 20 20:12:41 2014
@@ -235,6 +235,8 @@ final public class FilterList extends Fi
 
   
   @Override
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",
+    justification="Intentional")
   public ReturnCode filterKeyValue(Cell v) throws IOException {
     this.referenceKV = v;
 
@@ -252,7 +254,7 @@ final public class FilterList extends Fi
         switch (code) {
         // Override INCLUDE and continue to evaluate.
         case INCLUDE_AND_NEXT_COL:
-          rc = ReturnCode.INCLUDE_AND_NEXT_COL;
+          rc = ReturnCode.INCLUDE_AND_NEXT_COL; // FindBugs SF_SWITCH_FALLTHROUGH
         case INCLUDE:
           transformed = filter.transformCell(transformed);
           continue;

Modified: 
hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
URL: 
http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java?rev=1596383&r1=1596382&r2=1596383&view=diff
==============================================================================
--- 
hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
 (original)
+++ 
hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
 Tue May 20 20:12:41 2014
@@ -77,8 +77,8 @@ public class NullComparator extends Byte
   public static NullComparator parseFrom(final byte [] pbBytes)
   throws DeserializationException {
     try {
-      @SuppressWarnings("unused")
-      ComparatorProtos.NullComparator proto = ComparatorProtos.NullComparator.parseFrom(pbBytes);
+      // Just parse.  Don't use what we parse since on end we are returning new NullComparator.
+      ComparatorProtos.NullComparator.parseFrom(pbBytes);
     } catch (InvalidProtocolBufferException e) {
       throw new DeserializationException(e);
     }

Modified: 
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
URL: 
http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java?rev=1596383&r1=1596382&r2=1596383&view=diff
==============================================================================
--- 
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
 (original)
+++ 
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
 Tue May 20 20:12:41 2014
@@ -76,7 +76,7 @@ public class ClientSideRegionScanner ext
     values.clear();
 
    scanner.nextRaw(values, -1); // pass -1 as limit so that we see the whole row.
-    if (values == null || values.isEmpty()) {
+    if (values.isEmpty()) {
       //we are done
       return null;
     }

Modified: 
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java
URL: 
http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java?rev=1596383&r1=1596382&r2=1596383&view=diff
==============================================================================
--- 
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java
 (original)
+++ 
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java
 Tue May 20 20:12:41 2014
@@ -37,14 +37,15 @@ import org.apache.hadoop.hbase.io.hfile.
  * Common functionality needed by all versions of {@link HFile} readers.
  */
 @InterfaceAudience.Private
[email protected](value="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
 public abstract class AbstractHFileReader
     implements HFile.Reader, Configurable {
   /** Stream to read from. Does checksum verifications in file system */
-  protected FSDataInputStream istream;
+  protected FSDataInputStream istream; // UUF_UNUSED_PUBLIC_OR_PROTECTED_FIELD
 
   /** The file system stream of the underlying {@link HFile} that
    * does not do checksum verification in the file system */
-  protected FSDataInputStream istreamNoFsChecksum;
+  protected FSDataInputStream istreamNoFsChecksum;  // UUF_UNUSED_PUBLIC_OR_PROTECTED_FIELD
 
   /** Data block index reader keeping the root data index in memory */
   protected HFileBlockIndex.BlockIndexReader dataBlockIndexReader;
@@ -95,6 +96,7 @@ public abstract class AbstractHFileReade
 
   protected Configuration conf;
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
   protected AbstractHFileReader(Path path, FixedFileTrailer trailer,
       final long fileSize, final CacheConfig cacheConf, final HFileSystem hfs,
       final Configuration conf) {
@@ -104,7 +106,7 @@ public abstract class AbstractHFileReade
     this.fileSize = fileSize;
     this.path = path;
     this.name = path.getName();
-    this.hfs = hfs;
+    this.hfs = hfs; // URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD
     this.conf = conf;
   }
 

Modified: 
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
URL: 
http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java?rev=1596383&r1=1596382&r2=1596383&view=diff
==============================================================================
--- 
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
 (original)
+++ 
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
 Tue May 20 20:12:41 2014
@@ -1165,8 +1165,7 @@ public class RpcServer implements RpcSer
     // Fake 'call' for failed authorization response
     private static final int AUTHROIZATION_FAILED_CALLID = -1;
     private final Call authFailedCall =
-      new Call(AUTHROIZATION_FAILED_CALLID, this.service, null,
-        null, null, null, this, null, 0, null);
+      new Call(AUTHROIZATION_FAILED_CALLID, null, null, null, null, null, this, null, 0, null);
     private ByteArrayOutputStream authFailedResponse =
         new ByteArrayOutputStream();
     // Fake 'call' for SASL context setup


Reply via email to