[16/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java
index b63acb2..7204813 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java
@@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class FilterProtos {
   private FilterProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface FilterOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface FilterOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.Filter)
+  com.google.protobuf.MessageOrBuilder {
 
-// required string name = 1;
 /**
  * required string name = 1;
  */
@@ -26,7 +32,6 @@ public final class FilterProtos {
 com.google.protobuf.ByteString
 getNameBytes();
 
-// optional bytes serialized_filter = 2;
 /**
  * optional bytes serialized_filter = 2;
  */
@@ -39,36 +44,29 @@ public final class FilterProtos {
   /**
* Protobuf type {@code hbase.pb.Filter}
*/
-  public static final class Filter extends
-  com.google.protobuf.GeneratedMessage
-  implements FilterOrBuilder {
+  public  static final class Filter extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.Filter)
+  FilterOrBuilder {
 // Use Filter.newBuilder() to construct.
-private Filter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private Filter(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
-}
-private Filter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final Filter defaultInstance;
-public static Filter getDefaultInstance() {
-  return defaultInstance;
 }
-
-public Filter getDefaultInstanceForType() {
-  return defaultInstance;
+private Filter() {
+  name_ = "";
+  serializedFilter_ = com.google.protobuf.ByteString.EMPTY;
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private Filter(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   int mutable_bitField0_ = 0;
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -88,8 +86,9 @@ public final class FilterProtos {
   break;
 }
 case 10: {
+  com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x0001;
-  name_ = input.readBytes();
+  name_ = bs;
   break;
 }
 case 18: {
@@ -103,7 +102,7 @@ public final class FilterProtos {
 throw e.setUnfinishedMessage(this);
   } catch (java.io.IOException e) {
 throw new com.google.protobuf.InvalidProtocolBufferException(
-e.getMessage()).setUnfinishedMessage(this);
+e).setUnfinishedMessage(this);
   } finally {
 this.unknownFields = unknownFields.build();
 makeExtensionsImmutable();
@@ -114,32 +113,16 @@ public final class FilterProtos {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_descriptor;
 }
 
-protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
 internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.class, 

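The hunk above is the template for every file in this commit: protoc3 adds a registerAllExtensions(ExtensionRegistryLite) overload and turns the old ExtensionRegistry overload into a delegate, and generated messages move from GeneratedMessage to GeneratedMessageV3. A minimal, self-contained sketch of the delegation pattern (illustrative class name, not HBase code):

import com.google.protobuf.ExtensionRegistry;
import com.google.protobuf.ExtensionRegistryLite;

public final class RegistrationPatternDemo {
  private RegistrationPatternDemo() {}

  public static void registerAllExtensions(ExtensionRegistryLite registry) {
    // protoc3 emits per-extension registrations here; Filter.proto defines none
  }

  public static void registerAllExtensions(ExtensionRegistry registry) {
    // the full registry is routed through the lite overload,
    // giving the lite and full runtimes a single registration path
    registerAllExtensions((ExtensionRegistryLite) registry);
  }

  public static void main(String[] args) {
    registerAllExtensions(ExtensionRegistry.newInstance());
  }
}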
[23/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java
index a43b5a0..c4ec758 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java
@@ -6,26 +6,32 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class ClientProtos {
   private ClientProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
   /**
-   * Protobuf enum {@code hbase.pb.Consistency}
-   *
* <pre>
**
* Consistency defines the expected consistency level for an operation.
* </pre>
+   *
+   * Protobuf enum {@code hbase.pb.Consistency}
*/
   public enum Consistency
   implements com.google.protobuf.ProtocolMessageEnum {
 /**
  * STRONG = 0;
  */
-STRONG(0, 0),
+STRONG(0),
 /**
  * TIMELINE = 1;
  */
-TIMELINE(1, 1),
+TIMELINE(1),
 ;
 
 /**
@@ -38,9 +44,19 @@ public final class ClientProtos {
 public static final int TIMELINE_VALUE = 1;
 
 
-public final int getNumber() { return value; }
+public final int getNumber() {
+  return value;
+}
 
+/**
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+@java.lang.Deprecated
 public static Consistency valueOf(int value) {
+  return forNumber(value);
+}
+
+public static Consistency forNumber(int value) {
   switch (value) {
 case 0: return STRONG;
 case 1: return TIMELINE;
@@ -52,17 +68,17 @@ public final class ClientProtos {
 internalGetValueMap() {
   return internalValueMap;
 }
-private static com.google.protobuf.Internal.EnumLiteMap<Consistency>
-internalValueMap =
+private static final com.google.protobuf.Internal.EnumLiteMap<
+Consistency> internalValueMap =
  new com.google.protobuf.Internal.EnumLiteMap<Consistency>() {
 public Consistency findValueByNumber(int number) {
-  return Consistency.valueOf(number);
+  return Consistency.forNumber(number);
 }
   };
 
 public final com.google.protobuf.Descriptors.EnumValueDescriptor
 getValueDescriptor() {
-  return getDescriptor().getValues().get(index);
+  return getDescriptor().getValues().get(ordinal());
 }
 public final com.google.protobuf.Descriptors.EnumDescriptor
 getDescriptorForType() {
@@ -84,26 +100,24 @@ public final class ClientProtos {
   return VALUES[desc.getIndex()];
 }
 
-private final int index;
 private final int value;
 
-private Consistency(int index, int value) {
-  this.index = index;
+private Consistency(int value) {
   this.value = value;
 }
 
 // @@protoc_insertion_point(enum_scope:hbase.pb.Consistency)
   }
 
-  public interface AuthorizationsOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface AuthorizationsOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.Authorizations)
+  com.google.protobuf.MessageOrBuilder {
 
-// repeated string label = 1;
 /**
  * repeated string label = 1;
  */
 java.util.List
-getLabelList();
+getLabelList();
 /**
  * repeated string label = 1;
  */
@@ -119,43 +133,35 @@ public final class ClientProtos {
 getLabelBytes(int index);
   }
   /**
-   * Protobuf type {@code hbase.pb.Authorizations}
-   *
* <pre>
**
* The protocol buffer version of Authorizations.
* </pre>
+   *
+   * Protobuf type {@code hbase.pb.Authorizations}
*/
-  public static final class Authorizations extends
-  com.google.protobuf.GeneratedMessage
-  implements AuthorizationsOrBuilder {
+  public  static final class Authorizations extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.Authorizations)
+  AuthorizationsOrBuilder {
 // Use Authorizations.newBuilder() to construct.
-private Authorizations(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private Authorizations(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
-}
-private Authorizations(boolean noInit) { 

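The Consistency hunk distills the protoc3 enum changes: the (index, value) constructor pair shrinks to just the wire value, valueOf(int) survives only as a deprecated alias for forNumber(int), and descriptor lookup goes through ordinal(). A standalone sketch of the new shape (demo enum, not the generated class):

public enum ConsistencyDemo {
  STRONG(0),
  TIMELINE(1);

  private final int value;  // wire number; ordinal() now serves as the index

  ConsistencyDemo(int value) {
    this.value = value;
  }

  public int getNumber() {
    return value;
  }

  /** @deprecated Use {@link #forNumber(int)} instead. */
  @Deprecated
  public static ConsistencyDemo valueOf(int value) {
    return forNumber(value);
  }

  public static ConsistencyDemo forNumber(int value) {
    switch (value) {
      case 0: return STRONG;
      case 1: return TIMELINE;
      default: return null;  // unknown wire values resolve to null
    }
  }
}

The null return from forNumber is why the regenerated parsers push unrecognized numbers into the unknown-field set rather than failing outright.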
[14/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java
index 99dbd72..2a3d649 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java
@@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class HFileProtos {
   private HFileProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface FileInfoProtoOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface FileInfoProtoOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.FileInfoProto)
+  com.google.protobuf.MessageOrBuilder {
 
-// repeated .hbase.pb.BytesBytesPair map_entry = 1;
 /**
  * repeated .hbase.pb.BytesBytesPair map_entry = 1;
  */
@@ -37,42 +43,34 @@ public final class HFileProtos {
 int index);
   }
   /**
-   * Protobuf type {@code hbase.pb.FileInfoProto}
-   *
* <pre>
* Map of name/values
* </pre>
+   *
+   * Protobuf type {@code hbase.pb.FileInfoProto}
*/
-  public static final class FileInfoProto extends
-  com.google.protobuf.GeneratedMessage
-  implements FileInfoProtoOrBuilder {
+  public  static final class FileInfoProto extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.FileInfoProto)
+  FileInfoProtoOrBuilder {
 // Use FileInfoProto.newBuilder() to construct.
-private FileInfoProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private FileInfoProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
-}
-private FileInfoProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final FileInfoProto defaultInstance;
-public static FileInfoProto getDefaultInstance() {
-  return defaultInstance;
 }
-
-public FileInfoProto getDefaultInstanceForType() {
-  return defaultInstance;
+private FileInfoProto() {
+  mapEntry_ = java.util.Collections.emptyList();
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private FileInfoProto(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   int mutable_bitField0_ = 0;
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -96,7 +94,8 @@ public final class HFileProtos {
mapEntry_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair>();
 mutable_bitField0_ |= 0x0001;
   }
-  mapEntry_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry));
+  mapEntry_.add(
+      input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry));
   break;
 }
   }
@@ -105,7 +104,7 @@ public final class HFileProtos {
 throw e.setUnfinishedMessage(this);
   } catch (java.io.IOException e) {
 throw new com.google.protobuf.InvalidProtocolBufferException(
-e.getMessage()).setUnfinishedMessage(this);
+e).setUnfinishedMessage(this);
   } finally {
 if (((mutable_bitField0_ & 0x0001) == 0x0001)) {
   mapEntry_ = java.util.Collections.unmodifiableList(mapEntry_);
@@ -119,29 +118,13 @@ public final class HFileProtos {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor;
 }
 
-protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
 

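The FileInfoProto parser shows protoc's repeated-field lifecycle: allocate a mutable ArrayList on the first entry, accumulate inside the read loop, and freeze the list with Collections.unmodifiableList in the finally block. A minimal sketch of that lifecycle, with plain strings standing in for BytesBytesPair messages:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public final class RepeatedFieldDemo {
  public static List<String> parse(Iterable<String> wireEntries) {
    List<String> mapEntry = null;
    boolean allocated = false;  // stands in for the mutable_bitField0_ flag
    try {
      for (String entry : wireEntries) {
        if (!allocated) {
          mapEntry = new ArrayList<>();  // lazily allocated on first entry
          allocated = true;
        }
        mapEntry.add(entry);
      }
    } finally {
      if (allocated) {
        // freeze the list once input is consumed, as in the finally block above
        mapEntry = Collections.unmodifiableList(mapEntry);
      }
    }
    return allocated ? mapEntry : Collections.<String>emptyList();
  }

  public static void main(String[] args) {
    System.out.println(parse(Arrays.asList("k1=v1", "k2=v2")));
  }
}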
[21/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java
index c9e34d9..f945184 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java
@@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class ClusterStatusProtos {
   private ClusterStatusProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface RegionStateOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface RegionStateOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.RegionState)
+  com.google.protobuf.MessageOrBuilder {
 
-// required .hbase.pb.RegionInfo region_info = 1;
 /**
  * required .hbase.pb.RegionInfo region_info = 1;
  */
@@ -25,7 +31,6 @@ public final class ClusterStatusProtos {
  */
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder
 getRegionInfoOrBuilder();
 
-// required .hbase.pb.RegionState.State state = 2;
 /**
  * required .hbase.pb.RegionState.State state = 2;
  */
@@ -35,7 +40,6 @@ public final class ClusterStatusProtos {
  */
 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State
 getState();
 
-// optional uint64 stamp = 3;
 /**
  * optional uint64 stamp = 3;
  */
@@ -48,36 +52,29 @@ public final class ClusterStatusProtos {
   /**
* Protobuf type {@code hbase.pb.RegionState}
*/
-  public static final class RegionState extends
-  com.google.protobuf.GeneratedMessage
-  implements RegionStateOrBuilder {
+  public  static final class RegionState extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.RegionState)
+  RegionStateOrBuilder {
 // Use RegionState.newBuilder() to construct.
-private RegionState(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private RegionState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
-}
-private RegionState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final RegionState defaultInstance;
-public static RegionState getDefaultInstance() {
-  return defaultInstance;
 }
-
-public RegionState getDefaultInstanceForType() {
-  return defaultInstance;
+private RegionState() {
+  state_ = 0;
+  stamp_ = 0L;
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private RegionState(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   int mutable_bitField0_ = 0;
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -116,7 +113,7 @@ public final class ClusterStatusProtos {
 unknownFields.mergeVarintField(2, rawValue);
   } else {
 bitField0_ |= 0x0002;
-state_ = value;
+state_ = rawValue;
   }
   break;
 }
@@ -131,7 +128,7 @@ public final class ClusterStatusProtos {
 throw e.setUnfinishedMessage(this);
   } catch (java.io.IOException e) {
 throw new com.google.protobuf.InvalidProtocolBufferException(
-e.getMessage()).setUnfinishedMessage(this);
+e).setUnfinishedMessage(this);
   } finally {
 this.unknownFields = unknownFields.build();
 makeExtensionsImmutable();
@@ -142,282 +139,277 @@ public final class ClusterStatusProtos {
   return 

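Note `state_ = rawValue` in the RegionState parser: protoc3 keeps the raw varint in the int-typed field and resolves the enum lazily on access, instead of storing the enum constant at parse time. A sketch of that arrangement (field and type names are illustrative, mirroring the generated shape):

public final class RawEnumFieldDemo {
  public enum State { OFFLINE, OPEN }

  private int state_;  // raw wire number, as in the regenerated RegionState

  public void readStateFromWire(int rawValue) {
    if (forNumber(rawValue) == null) {
      // generated code routes unknown numbers to unknownFields.mergeVarintField(...)
      return;
    }
    state_ = rawValue;  // store the number, not the enum constant
  }

  public State getState() {
    State result = forNumber(state_);  // resolved lazily on access
    return result == null ? State.OFFLINE : result;  // fall back to the default
  }

  private static State forNumber(int value) {
    switch (value) {
      case 0: return State.OFFLINE;
      case 1: return State.OPEN;
      default: return null;
    }
  }

  public static void main(String[] args) {
    RawEnumFieldDemo d = new RawEnumFieldDemo();
    d.readStateFromWire(1);
    System.out.println(d.getState());  // OPEN
  }
}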
[03/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java
index 9513ccb..e032be7 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java
@@ -6,7 +6,13 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class WALProtos {
   private WALProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
   /**
* Protobuf enum {@code hbase.pb.ScopeType}
@@ -16,15 +22,15 @@ public final class WALProtos {
 /**
  * REPLICATION_SCOPE_LOCAL = 0;
  */
-REPLICATION_SCOPE_LOCAL(0, 0),
+REPLICATION_SCOPE_LOCAL(0),
 /**
  * REPLICATION_SCOPE_GLOBAL = 1;
  */
-REPLICATION_SCOPE_GLOBAL(1, 1),
+REPLICATION_SCOPE_GLOBAL(1),
 /**
  * REPLICATION_SCOPE_SERIAL = 2;
  */
-REPLICATION_SCOPE_SERIAL(2, 2),
+REPLICATION_SCOPE_SERIAL(2),
 ;
 
 /**
@@ -41,9 +47,19 @@ public final class WALProtos {
 public static final int REPLICATION_SCOPE_SERIAL_VALUE = 2;
 
 
-public final int getNumber() { return value; }
+public final int getNumber() {
+  return value;
+}
 
+/**
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+@java.lang.Deprecated
 public static ScopeType valueOf(int value) {
+  return forNumber(value);
+}
+
+public static ScopeType forNumber(int value) {
   switch (value) {
 case 0: return REPLICATION_SCOPE_LOCAL;
 case 1: return REPLICATION_SCOPE_GLOBAL;
@@ -56,17 +72,17 @@ public final class WALProtos {
 internalGetValueMap() {
   return internalValueMap;
 }
-private static com.google.protobuf.Internal.EnumLiteMap<ScopeType>
-internalValueMap =
+private static final com.google.protobuf.Internal.EnumLiteMap<
+ScopeType> internalValueMap =
  new com.google.protobuf.Internal.EnumLiteMap<ScopeType>() {
 public ScopeType findValueByNumber(int number) {
-  return ScopeType.valueOf(number);
+  return ScopeType.forNumber(number);
 }
   };
 
 public final com.google.protobuf.Descriptors.EnumValueDescriptor
 getValueDescriptor() {
-  return getDescriptor().getValues().get(index);
+  return getDescriptor().getValues().get(ordinal());
 }
 public final com.google.protobuf.Descriptors.EnumDescriptor
 getDescriptorForType() {
@@ -88,21 +104,19 @@ public final class WALProtos {
   return VALUES[desc.getIndex()];
 }
 
-private final int index;
 private final int value;
 
-private ScopeType(int index, int value) {
-  this.index = index;
+private ScopeType(int value) {
   this.value = value;
 }
 
 // @@protoc_insertion_point(enum_scope:hbase.pb.ScopeType)
   }
 
-  public interface WALHeaderOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface WALHeaderOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.WALHeader)
+  com.google.protobuf.MessageOrBuilder {
 
-// optional bool has_compression = 1;
 /**
  * optional bool has_compression = 1;
  */
@@ -112,7 +126,6 @@ public final class WALProtos {
  */
 boolean getHasCompression();
 
-// optional bytes encryption_key = 2;
 /**
  * optional bytes encryption_key = 2;
  */
@@ -122,7 +135,6 @@ public final class WALProtos {
  */
 com.google.protobuf.ByteString getEncryptionKey();
 
-// optional bool has_tag_compression = 3;
 /**
  * optional bool has_tag_compression = 3;
  */
@@ -132,7 +144,6 @@ public final class WALProtos {
  */
 boolean getHasTagCompression();
 
-// optional string writer_cls_name = 4;
 /**
  * optional string writer_cls_name = 4;
  */
@@ -147,7 +158,6 @@ public final class WALProtos {
 com.google.protobuf.ByteString
 getWriterClsNameBytes();
 
-// optional string cell_codec_cls_name = 5;
 /**
  * optional string cell_codec_cls_name = 5;
  */
@@ -165,36 +175,32 @@ public final class WALProtos {
   /**
* Protobuf type {@code hbase.pb.WALHeader}
*/
-  public static final class WALHeader extends
-  com.google.protobuf.GeneratedMessage
-  implements 

[05/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
index bb09cde..4d3b294 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
@@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class SnapshotProtos {
   private SnapshotProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface SnapshotFileInfoOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface SnapshotFileInfoOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotFileInfo)
+  com.google.protobuf.MessageOrBuilder {
 
-// required .hbase.pb.SnapshotFileInfo.Type type = 1;
 /**
  * required .hbase.pb.SnapshotFileInfo.Type type = 1;
  */
@@ -21,7 +27,6 @@ public final class SnapshotProtos {
  */
 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type
 getType();
 
-// optional string hfile = 3;
 /**
  * optional string hfile = 3;
  */
@@ -36,7 +41,6 @@ public final class SnapshotProtos {
 com.google.protobuf.ByteString
 getHfileBytes();
 
-// optional string wal_server = 4;
 /**
  * optional string wal_server = 4;
  */
@@ -51,7 +55,6 @@ public final class SnapshotProtos {
 com.google.protobuf.ByteString
 getWalServerBytes();
 
-// optional string wal_name = 5;
 /**
  * optional string wal_name = 5;
  */
@@ -69,36 +72,31 @@ public final class SnapshotProtos {
   /**
* Protobuf type {@code hbase.pb.SnapshotFileInfo}
*/
-  public static final class SnapshotFileInfo extends
-  com.google.protobuf.GeneratedMessage
-  implements SnapshotFileInfoOrBuilder {
+  public  static final class SnapshotFileInfo extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotFileInfo)
+  SnapshotFileInfoOrBuilder {
 // Use SnapshotFileInfo.newBuilder() to construct.
-private SnapshotFileInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private SnapshotFileInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
 }
-private SnapshotFileInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final SnapshotFileInfo defaultInstance;
-public static SnapshotFileInfo getDefaultInstance() {
-  return defaultInstance;
-}
-
-public SnapshotFileInfo getDefaultInstanceForType() {
-  return defaultInstance;
+private SnapshotFileInfo() {
+  type_ = 1;
+  hfile_ = "";
+  walServer_ = "";
+  walName_ = "";
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private SnapshotFileInfo(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   int mutable_bitField0_ = 0;
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -124,23 +122,26 @@ public final class SnapshotProtos {
 unknownFields.mergeVarintField(1, rawValue);
   } else {
 bitField0_ |= 0x0001;
-type_ = value;
+type_ = rawValue;
   }
   break;
 }
 case 26: {
+  com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x0002;
-  hfile_ = input.readBytes();
+  hfile_ = bs;
   break;
 }
 case 34: {
+  com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x0004;
-  walServer_ = 

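The `com.google.protobuf.ByteString bs = input.readBytes()` lines above are pb3's string-field idiom: the parser stores the raw ByteString and defers UTF-8 decoding to the accessor, which caches the decoded String. A rough sketch of the accessor side with a hypothetical field (the real generated accessor also checks bs.isValidUtf8() before caching):

import com.google.protobuf.ByteString;

public final class LazyStringFieldDemo {
  private Object hfile_ = ByteString.EMPTY;  // ByteString until first access

  public void readFromWire(ByteString bs) {
    hfile_ = bs;  // no UTF-8 work at parse time
  }

  public String getHfile() {
    Object ref = hfile_;
    if (ref instanceof String) {
      return (String) ref;  // already decoded and cached
    }
    String s = ((ByteString) ref).toStringUtf8();  // decode on demand
    hfile_ = s;                                    // cache for later calls
    return s;
  }

  public static void main(String[] args) {
    LazyStringFieldDemo d = new LazyStringFieldDemo();
    d.readFromWire(ByteString.copyFromUtf8("region-a/hfile-0001"));
    System.out.println(d.getHfile());
  }
}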
[18/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java
index 16e6b11..d696e90 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java
@@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class ErrorHandlingProtos {
   private ErrorHandlingProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface StackTraceElementMessageOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface StackTraceElementMessageOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.StackTraceElementMessage)
+  com.google.protobuf.MessageOrBuilder {
 
-// optional string declaring_class = 1;
 /**
  * optional string declaring_class = 1;
  */
@@ -26,7 +32,6 @@ public final class ErrorHandlingProtos {
 com.google.protobuf.ByteString
 getDeclaringClassBytes();
 
-// optional string method_name = 2;
 /**
  * optional string method_name = 2;
  */
@@ -41,7 +46,6 @@ public final class ErrorHandlingProtos {
 com.google.protobuf.ByteString
 getMethodNameBytes();
 
-// optional string file_name = 3;
 /**
  * optional string file_name = 3;
  */
@@ -56,7 +60,6 @@ public final class ErrorHandlingProtos {
 com.google.protobuf.ByteString
 getFileNameBytes();
 
-// optional int32 line_number = 4;
 /**
  * optional int32 line_number = 4;
  */
@@ -67,44 +70,39 @@ public final class ErrorHandlingProtos {
 int getLineNumber();
   }
   /**
-   * Protobuf type {@code hbase.pb.StackTraceElementMessage}
-   *
* <pre>
**
* Protobuf version of a java.lang.StackTraceElement
* so we can serialize exceptions.
* </pre>
+   *
+   * Protobuf type {@code hbase.pb.StackTraceElementMessage}
*/
-  public static final class StackTraceElementMessage extends
-  com.google.protobuf.GeneratedMessage
-  implements StackTraceElementMessageOrBuilder {
+  public  static final class StackTraceElementMessage extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.StackTraceElementMessage)
+  StackTraceElementMessageOrBuilder {
 // Use StackTraceElementMessage.newBuilder() to construct.
-private StackTraceElementMessage(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private StackTraceElementMessage(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
-}
-private StackTraceElementMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final StackTraceElementMessage defaultInstance;
-public static StackTraceElementMessage getDefaultInstance() {
-  return defaultInstance;
 }
-
-public StackTraceElementMessage getDefaultInstanceForType() {
-  return defaultInstance;
+private StackTraceElementMessage() {
+  declaringClass_ = "";
+  methodName_ = "";
+  fileName_ = "";
+  lineNumber_ = 0;
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private StackTraceElementMessage(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   int mutable_bitField0_ = 0;
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -124,18 +122,21 @@ public final class ErrorHandlingProtos {
   break;
 }
 case 10: {
+  com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x0001;
-  declaringClass_ = input.readBytes();
+  declaringClass_ = bs;
   

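A change repeated in every parser hunk in this series: `new InvalidProtocolBufferException(e.getMessage())` becomes `new InvalidProtocolBufferException(e)`, so the underlying IOException survives as the cause instead of being flattened to its message. A small demonstration:

import com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;

public final class CauseChainDemo {
  public static void main(String[] args) {
    IOException io = new IOException("stream truncated");
    // pb2-generated style: message only, original stack trace lost
    InvalidProtocolBufferException flattened =
        new InvalidProtocolBufferException(io.getMessage());
    // pb3-generated style: full cause chain retained
    InvalidProtocolBufferException chained =
        new InvalidProtocolBufferException(io);
    System.out.println(flattened.getCause());  // null
    System.out.println(chained.getCause());    // java.io.IOException: stream truncated
  }
}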
[07/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java
index fc6555a..66b6990 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java
@@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class RegionNormalizerProtos {
   private RegionNormalizerProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface RegionNormalizerStateOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface RegionNormalizerStateOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:RegionNormalizerState)
+  com.google.protobuf.MessageOrBuilder {
 
-// optional bool normalizer_on = 1;
 /**
  * optional bool normalizer_on = 1;
  */
@@ -24,36 +30,28 @@ public final class RegionNormalizerProtos {
   /**
* Protobuf type {@code RegionNormalizerState}
*/
-  public static final class RegionNormalizerState extends
-  com.google.protobuf.GeneratedMessage
-  implements RegionNormalizerStateOrBuilder {
+  public  static final class RegionNormalizerState extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:RegionNormalizerState)
+  RegionNormalizerStateOrBuilder {
 // Use RegionNormalizerState.newBuilder() to construct.
-private RegionNormalizerState(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private RegionNormalizerState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
-}
-private RegionNormalizerState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final RegionNormalizerState defaultInstance;
-public static RegionNormalizerState getDefaultInstance() {
-  return defaultInstance;
 }
-
-public RegionNormalizerState getDefaultInstanceForType() {
-  return defaultInstance;
+private RegionNormalizerState() {
+  normalizerOn_ = false;
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private RegionNormalizerState(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   int mutable_bitField0_ = 0;
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -83,7 +81,7 @@ public final class RegionNormalizerProtos {
 throw e.setUnfinishedMessage(this);
   } catch (java.io.IOException e) {
 throw new com.google.protobuf.InvalidProtocolBufferException(
-e.getMessage()).setUnfinishedMessage(this);
+e).setUnfinishedMessage(this);
   } finally {
 this.unknownFields = unknownFields.build();
 makeExtensionsImmutable();
@@ -94,30 +92,14 @@ public final class RegionNormalizerProtos {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.internal_static_RegionNormalizerState_descriptor;
 }
 
-protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
 internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.internal_static_RegionNormalizerState_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState.class,
          org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState.Builder.class);
 }
 
-public static com.google.protobuf.Parser<RegionNormalizerState> PARSER =
-new com.google.protobuf.AbstractParser<RegionNormalizerState>() {
-  

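The RegionNormalizerState hunk also shows the default-instance rework: pb2's statically initialized defaultInstance and noInit constructor give way to a plain no-arg constructor that assigns field defaults, backed by one shared instance. A sketch of the new arrangement (the DEFAULT_INSTANCE name mirrors protoc3 output; this is not the generated class):

public final class DefaultInstanceDemo {
  private final boolean normalizerOn_;

  private DefaultInstanceDemo() {
    normalizerOn_ = false;  // field defaults set in the ctor, replacing initFields()
  }

  private static final DefaultInstanceDemo DEFAULT_INSTANCE =
      new DefaultInstanceDemo();

  public static DefaultInstanceDemo getDefaultInstance() {
    return DEFAULT_INSTANCE;  // one shared default, no separately wired static
  }

  public DefaultInstanceDemo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

  public boolean getNormalizerOn() {
    return normalizerOn_;
  }

  public static void main(String[] args) {
    System.out.println(getDefaultInstance().getNormalizerOn());  // false
  }
}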
[04/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java
index 893fc62..940a498 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java
@@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class TracingProtos {
   private TracingProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface RPCTInfoOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface RPCTInfoOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.RPCTInfo)
+  com.google.protobuf.MessageOrBuilder {
 
-// optional int64 trace_id = 1;
 /**
  * optional int64 trace_id = 1;
  */
@@ -21,7 +27,6 @@ public final class TracingProtos {
  */
 long getTraceId();
 
-// optional int64 parent_id = 2;
 /**
  * optional int64 parent_id = 2;
  */
@@ -32,8 +37,6 @@ public final class TracingProtos {
 long getParentId();
   }
   /**
-   * Protobuf type {@code hbase.pb.RPCTInfo}
-   *
* <pre>
*Used to pass through the information necessary to continue
*a trace after an RPC is made. All we need is the traceid 
*the id of the current span when this message was sent, so we know 
*what span caused the new span we will create when this message is received.
* </pre>
* 
+   *
+   * Protobuf type {@code hbase.pb.RPCTInfo}
*/
-  public static final class RPCTInfo extends
-  com.google.protobuf.GeneratedMessage
-  implements RPCTInfoOrBuilder {
+  public  static final class RPCTInfo extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.RPCTInfo)
+  RPCTInfoOrBuilder {
 // Use RPCTInfo.newBuilder() to construct.
-private RPCTInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private RPCTInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
 }
-private RPCTInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final RPCTInfo defaultInstance;
-public static RPCTInfo getDefaultInstance() {
-  return defaultInstance;
-}
-
-public RPCTInfo getDefaultInstanceForType() {
-  return defaultInstance;
+private RPCTInfo() {
+  traceId_ = 0L;
+  parentId_ = 0L;
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private RPCTInfo(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   int mutable_bitField0_ = 0;
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -106,7 +104,7 @@ public final class TracingProtos {
 throw e.setUnfinishedMessage(this);
   } catch (java.io.IOException e) {
 throw new com.google.protobuf.InvalidProtocolBufferException(
-e.getMessage()).setUnfinishedMessage(this);
+e).setUnfinishedMessage(this);
   } finally {
 this.unknownFields = unknownFields.build();
 makeExtensionsImmutable();
@@ -117,30 +115,14 @@ public final class TracingProtos {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.internal_static_hbase_pb_RPCTInfo_descriptor;
 }
 
-protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
 internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.internal_static_hbase_pb_RPCTInfo_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
   

[19/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java
index dca6ea2..689d33f 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java
@@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class EncryptionProtos {
   private EncryptionProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface WrappedKeyOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface WrappedKeyOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.WrappedKey)
+  com.google.protobuf.MessageOrBuilder {
 
-// required string algorithm = 1;
 /**
  * required string algorithm = 1;
  */
@@ -26,7 +32,6 @@ public final class EncryptionProtos {
 com.google.protobuf.ByteString
 getAlgorithmBytes();
 
-// required uint32 length = 2;
 /**
  * required uint32 length = 2;
  */
@@ -36,7 +41,6 @@ public final class EncryptionProtos {
  */
 int getLength();
 
-// required bytes data = 3;
 /**
  * required bytes data = 3;
  */
@@ -46,7 +50,6 @@ public final class EncryptionProtos {
  */
 com.google.protobuf.ByteString getData();
 
-// optional bytes iv = 4;
 /**
  * optional bytes iv = 4;
  */
@@ -56,7 +59,6 @@ public final class EncryptionProtos {
  */
 com.google.protobuf.ByteString getIv();
 
-// optional bytes hash = 5;
 /**
  * optional bytes hash = 5;
  */
@@ -69,36 +71,32 @@ public final class EncryptionProtos {
   /**
* Protobuf type {@code hbase.pb.WrappedKey}
*/
-  public static final class WrappedKey extends
-  com.google.protobuf.GeneratedMessage
-  implements WrappedKeyOrBuilder {
+  public  static final class WrappedKey extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.WrappedKey)
+  WrappedKeyOrBuilder {
 // Use WrappedKey.newBuilder() to construct.
-private WrappedKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private WrappedKey(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
-}
-private WrappedKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final WrappedKey defaultInstance;
-public static WrappedKey getDefaultInstance() {
-  return defaultInstance;
 }
-
-public WrappedKey getDefaultInstanceForType() {
-  return defaultInstance;
+private WrappedKey() {
+  algorithm_ = "";
+  length_ = 0;
+  data_ = com.google.protobuf.ByteString.EMPTY;
+  iv_ = com.google.protobuf.ByteString.EMPTY;
+  hash_ = com.google.protobuf.ByteString.EMPTY;
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private WrappedKey(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   int mutable_bitField0_ = 0;
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -118,8 +116,9 @@ public final class EncryptionProtos {
   break;
 }
 case 10: {
+  com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x0001;
-  algorithm_ = input.readBytes();
+  algorithm_ = bs;
   break;
 }
 case 16: {
@@ -148,7 +147,7 @@ public final class EncryptionProtos {
 throw e.setUnfinishedMessage(this);
   } catch (java.io.IOException e) {
 throw new com.google.protobuf.InvalidProtocolBufferException(
-e.getMessage()).setUnfinishedMessage(this);
+   

[24/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java
index 5b86b1d..f049d50 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java
@@ -6,46 +6,52 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class CellProtos {
   private CellProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
   /**
-   * Protobuf enum {@code hbase.pb.CellType}
-   *
* <pre>
**
* The type of the key in a Cell
* </pre>
+   *
+   * Protobuf enum {@code hbase.pb.CellType}
*/
   public enum CellType
   implements com.google.protobuf.ProtocolMessageEnum {
 /**
  * MINIMUM = 0;
  */
-MINIMUM(0, 0),
+MINIMUM(0),
 /**
  * PUT = 4;
  */
-PUT(1, 4),
+PUT(4),
 /**
  * DELETE = 8;
  */
-DELETE(2, 8),
+DELETE(8),
 /**
  * DELETE_COLUMN = 12;
  */
-DELETE_COLUMN(3, 12),
+DELETE_COLUMN(12),
 /**
  * DELETE_FAMILY = 14;
  */
-DELETE_FAMILY(4, 14),
+DELETE_FAMILY(14),
 /**
- * MAXIMUM = 255;
- *
 * <pre>
 * MAXIMUM is used when searching; you look from maximum on down.
 * </pre>
+ *
+ * MAXIMUM = 255;
  */
-MAXIMUM(5, 255),
+MAXIMUM(255),
 ;
 
 /**
@@ -69,18 +75,28 @@ public final class CellProtos {
  */
 public static final int DELETE_FAMILY_VALUE = 14;
 /**
- * MAXIMUM = 255;
- *
 * <pre>
 * MAXIMUM is used when searching; you look from maximum on down.
 * </pre>
+ *
+ * MAXIMUM = 255;
  */
 public static final int MAXIMUM_VALUE = 255;
 
 
-public final int getNumber() { return value; }
+public final int getNumber() {
+  return value;
+}
 
+/**
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+@java.lang.Deprecated
 public static CellType valueOf(int value) {
+  return forNumber(value);
+}
+
+public static CellType forNumber(int value) {
   switch (value) {
 case 0: return MINIMUM;
 case 4: return PUT;
@@ -96,17 +112,17 @@ public final class CellProtos {
 internalGetValueMap() {
   return internalValueMap;
 }
-private static com.google.protobuf.Internal.EnumLiteMap<CellType>
-internalValueMap =
+private static final com.google.protobuf.Internal.EnumLiteMap<
+CellType> internalValueMap =
  new com.google.protobuf.Internal.EnumLiteMap<CellType>() {
 public CellType findValueByNumber(int number) {
-  return CellType.valueOf(number);
+  return CellType.forNumber(number);
 }
   };
 
 public final com.google.protobuf.Descriptors.EnumValueDescriptor
 getValueDescriptor() {
-  return getDescriptor().getValues().get(index);
+  return getDescriptor().getValues().get(ordinal());
 }
 public final com.google.protobuf.Descriptors.EnumDescriptor
 getDescriptorForType() {
@@ -128,21 +144,19 @@ public final class CellProtos {
   return VALUES[desc.getIndex()];
 }
 
-private final int index;
 private final int value;
 
-private CellType(int index, int value) {
-  this.index = index;
+private CellType(int value) {
   this.value = value;
 }
 
 // @@protoc_insertion_point(enum_scope:hbase.pb.CellType)
   }
 
-  public interface CellOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface CellOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.Cell)
+  com.google.protobuf.MessageOrBuilder {
 
-// optional bytes row = 1;
 /**
  * optional bytes row = 1;
  */
@@ -152,7 +166,6 @@ public final class CellProtos {
  */
 com.google.protobuf.ByteString getRow();
 
-// optional bytes family = 2;
 /**
  * optional bytes family = 2;
  */
@@ -162,7 +175,6 @@ public final class CellProtos {
  */
 com.google.protobuf.ByteString getFamily();
 
-// optional bytes qualifier = 3;
 /**
  * optional bytes qualifier = 3;
  */
@@ -172,7 +184,6 @@ public final class CellProtos {
  */
 com.google.protobuf.ByteString getQualifier();
 
-// optional uint64 timestamp = 4;
 /**
  

[27/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java
index 417ee00..0fdd1fb 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java
@@ -6,44 +6,42 @@ package org.apache.hadoop.hbase.shaded.ipc.protobuf.generated;
 public final class TestProtos {
   private TestProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface EmptyRequestProtoOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface EmptyRequestProtoOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:EmptyRequestProto)
+  com.google.protobuf.MessageOrBuilder {
   }
   /**
* Protobuf type {@code EmptyRequestProto}
*/
-  public static final class EmptyRequestProto extends
-  com.google.protobuf.GeneratedMessage
-  implements EmptyRequestProtoOrBuilder {
+  public  static final class EmptyRequestProto extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:EmptyRequestProto)
+  EmptyRequestProtoOrBuilder {
 // Use EmptyRequestProto.newBuilder() to construct.
-private EmptyRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private EmptyRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
 }
-private EmptyRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final EmptyRequestProto defaultInstance;
-public static EmptyRequestProto getDefaultInstance() {
-  return defaultInstance;
-}
-
-public EmptyRequestProto getDefaultInstanceForType() {
-  return defaultInstance;
+private EmptyRequestProto() {
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private EmptyRequestProto(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
   try {
@@ -67,7 +65,7 @@ public final class TestProtos {
 throw e.setUnfinishedMessage(this);
   } catch (java.io.IOException e) {
 throw new com.google.protobuf.InvalidProtocolBufferException(
-e.getMessage()).setUnfinishedMessage(this);
+e).setUnfinishedMessage(this);
   } finally {
 this.unknownFields = unknownFields.build();
 makeExtensionsImmutable();
@@ -78,34 +76,18 @@ public final class TestProtos {
  return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_descriptor;
 }
 
-protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
 internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto.class,
          org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto.Builder.class);
 }
 
-public static com.google.protobuf.Parser<EmptyRequestProto> PARSER =
-new com.google.protobuf.AbstractParser<EmptyRequestProto>() {
-  public EmptyRequestProto parsePartialFrom(
-  com.google.protobuf.CodedInputStream input,
-  com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-  throws com.google.protobuf.InvalidProtocolBufferException {
-return new EmptyRequestProto(input, extensionRegistry);
-  }
-};
-
-@java.lang.Override
-public com.google.protobuf.Parser<EmptyRequestProto> getParserForType() {
-  

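On the subject line's ByteStringer change: protobuf 3.1 adds UnsafeByteOperations.unsafeWrap, which wraps a byte[] as a ByteString without copying, so HBase can drop HBaseZeroCopyByteString, a helper that had to live inside protobuf's own package to reach a package-private constructor. A minimal sketch against the stock (unshaded) 3.1 API:

import com.google.protobuf.ByteString;
import com.google.protobuf.UnsafeByteOperations;

public final class ByteStringerDemo {
  public static ByteString wrap(byte[] array) {
    // zero-copy: the returned ByteString aliases 'array', so the caller
    // must not mutate the array afterwards
    return UnsafeByteOperations.unsafeWrap(array);
  }

  public static void main(String[] args) {
    byte[] row = {'r', 'o', 'w', '1'};
    System.out.println(wrap(row).toStringUtf8());  // row1
  }
}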
[29/29] hbase git commit: Fix the failing TestReplicationSyncUpToolWithBulkLoadedData test

2016-09-29 Thread stack
Fix the failing TestReplicationSyncUpToolWithBulkLoadedData test


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/35cf5504
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/35cf5504
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/35cf5504

Branch: refs/heads/HBASE-16264
Commit: 35cf55048fe033373f6ae29dd6d3716ed7b18842
Parents: b4a729e
Author: stack 
Authored: Thu Sep 29 17:27:20 2016 -0700
Committer: stack 
Committed: Thu Sep 29 17:27:20 2016 -0700

--
 .../replication/ReplicationStateZKBase.java |  20 +-
 hbase-endpoint/pom.xml  |  22 ++
 ...ReplicationSyncUpToolWithBulkLoadedData.java | 235 +++
 ...ReplicationSyncUpToolWithBulkLoadedData.java | 235 ---
 4 files changed, 274 insertions(+), 238 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/35cf5504/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java
index 496ab50..8fc7c07 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java
@@ -18,9 +18,13 @@
  */
 package org.apache.hadoop.hbase.replication;
 
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
 import java.util.List;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -106,9 +110,19 @@ public abstract class ReplicationStateZKBase {
* /hbase/replication/peers/PEER_ID/peer-state.
*/
  protected static byte[] toByteArray(final ZooKeeperProtos.ReplicationState.State state) {
-    byte[] bytes =
-        ZooKeeperProtos.ReplicationState.newBuilder().setState(state).build().toByteArray();
-    return ProtobufUtil.prependPBMagic(bytes);
+    ZooKeeperProtos.ReplicationState msg =
+        ZooKeeperProtos.ReplicationState.newBuilder().setState(state).build();
+    // There is no toByteArray on this shaded pb Message; serialize through a CodedOutputStream.
+    // A 16 byte initial buffer seems fair enough here.
+    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
+      CodedOutputStream cos = CodedOutputStream.newInstance(baos, 16);
+  msg.writeTo(cos);
+  cos.flush();
+  baos.flush();
+  return ProtobufUtil.prependPBMagic(baos.toByteArray());
+} catch (IOException e) {
+  throw new RuntimeException(e);
+}
   }
 
   protected boolean peerExists(String id) throws KeeperException {
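
The replacement above sidesteps the missing Message#toByteArray() by streaming the message through a CodedOutputStream and prepending the HBase PB magic. A minimal standalone sketch of the same write path, assuming a plain com.google.protobuf.Message and a caller-supplied magic prefix (the helper name and parameters are illustrative, not from the commit):

    // Serialize a protobuf Message and prepend a magic prefix (illustrative helper).
    static byte[] toMagicPrefixedBytes(com.google.protobuf.Message msg, byte[] magic)
        throws java.io.IOException {
      java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream();
      // Small initial buffer; the CodedOutputStream flushes into baos as needed.
      com.google.protobuf.CodedOutputStream cos =
          com.google.protobuf.CodedOutputStream.newInstance(baos, 16);
      msg.writeTo(cos);
      cos.flush(); // push bytes still buffered in the CodedOutputStream
      byte[] pb = baos.toByteArray();
      byte[] out = new byte[magic.length + pb.length];
      System.arraycopy(magic, 0, out, 0, magic.length);
      System.arraycopy(pb, 0, out, magic.length, pb.length);
      return out;
    }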

http://git-wip-us.apache.org/repos/asf/hbase/blob/35cf5504/hbase-endpoint/pom.xml
--
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index 3b0ffd7..de05950 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -117,6 +117,28 @@
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>${compat.module}</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>${compat.module}</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-protocol</artifactId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/35cf5504/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
--
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
new file mode 100644
index 0000000..f54c632
--- /dev/null
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
@@ -0,0 +1,235 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may 

[08/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java
index 222f383..ead61ed 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java
@@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class RPCProtos {
   private RPCProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface UserInformationOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface UserInformationOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.UserInformation)
+  com.google.protobuf.MessageOrBuilder {
 
-// required string effective_user = 1;
 /**
  * required string effective_user = 1;
  */
@@ -26,7 +32,6 @@ public final class RPCProtos {
 com.google.protobuf.ByteString
 getEffectiveUserBytes();
 
-// optional string real_user = 2;
 /**
  * optional string real_user = 2;
  */
@@ -42,42 +47,35 @@ public final class RPCProtos {
 getRealUserBytes();
   }
   /**
-   * Protobuf type {@code hbase.pb.UserInformation}
-   *
* 
* User Information proto.  Included in ConnectionHeader on connection setup
* 
+   *
+   * Protobuf type {@code hbase.pb.UserInformation}
*/
-  public static final class UserInformation extends
-  com.google.protobuf.GeneratedMessage
-  implements UserInformationOrBuilder {
+  public  static final class UserInformation extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.UserInformation)
+  UserInformationOrBuilder {
 // Use UserInformation.newBuilder() to construct.
-private UserInformation(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private UserInformation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
-}
-private UserInformation(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final UserInformation defaultInstance;
-public static UserInformation getDefaultInstance() {
-  return defaultInstance;
 }
-
-public UserInformation getDefaultInstanceForType() {
-  return defaultInstance;
+private UserInformation() {
+  effectiveUser_ = "";
+  realUser_ = "";
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private UserInformation(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   int mutable_bitField0_ = 0;
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -97,13 +95,15 @@ public final class RPCProtos {
   break;
 }
 case 10: {
+  com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x0001;
-  effectiveUser_ = input.readBytes();
+  effectiveUser_ = bs;
   break;
 }
 case 18: {
+  com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x0002;
-  realUser_ = input.readBytes();
+  realUser_ = bs;
   break;
 }
   }
@@ -112,7 +112,7 @@ public final class RPCProtos {
 throw e.setUnfinishedMessage(this);
   } catch (java.io.IOException e) {
 throw new com.google.protobuf.InvalidProtocolBufferException(
-e.getMessage()).setUnfinishedMessage(this);
+e).setUnfinishedMessage(this);
   } finally {
 this.unknownFields = unknownFields.build();
 makeExtensionsImmutable();
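
The ByteString bs variable above is the pb3 lazy-string idiom: the parser keeps the raw bytes and defers UTF-8 decoding to the accessor, which caches the decoded String. A sketch of the accessor protoc3 generates for such a field, reconstructed from protoc conventions rather than quoted from this diff:

    // Field is stored as Object: either the raw ByteString or the cached String.
    private volatile java.lang.Object effectiveUser_;

    public java.lang.String getEffectiveUser() {
      java.lang.Object ref = effectiveUser_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref; // already decoded and cached
      }
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (bs.isValidUtf8()) {
        effectiveUser_ = s; // cache only when the bytes are valid UTF-8
      }
      return s;
    }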
@@ -123,32 +123,16 @@ public final class RPCProtos {
   return 

[20/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java
index aa8e31c..e1c605d 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java
@@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class ComparatorProtos {
   private ComparatorProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface ComparatorOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface ComparatorOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.Comparator)
+  com.google.protobuf.MessageOrBuilder {
 
-// required string name = 1;
 /**
  * required string name = 1;
  */
@@ -26,7 +32,6 @@ public final class ComparatorProtos {
 com.google.protobuf.ByteString
 getNameBytes();
 
-// optional bytes serialized_comparator = 2;
 /**
  * optional bytes serialized_comparator = 2;
  */
@@ -39,36 +44,29 @@ public final class ComparatorProtos {
   /**
* Protobuf type {@code hbase.pb.Comparator}
*/
-  public static final class Comparator extends
-  com.google.protobuf.GeneratedMessage
-  implements ComparatorOrBuilder {
+  public  static final class Comparator extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.Comparator)
+  ComparatorOrBuilder {
 // Use Comparator.newBuilder() to construct.
-private Comparator(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private Comparator(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
 }
-private Comparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final Comparator defaultInstance;
-public static Comparator getDefaultInstance() {
-  return defaultInstance;
-}
-
-public Comparator getDefaultInstanceForType() {
-  return defaultInstance;
+private Comparator() {
+  name_ = "";
+  serializedComparator_ = com.google.protobuf.ByteString.EMPTY;
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private Comparator(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   int mutable_bitField0_ = 0;
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -88,8 +86,9 @@ public final class ComparatorProtos {
   break;
 }
 case 10: {
+  com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x0001;
-  name_ = input.readBytes();
+  name_ = bs;
   break;
 }
 case 18: {
@@ -103,7 +102,7 @@ public final class ComparatorProtos {
 throw e.setUnfinishedMessage(this);
   } catch (java.io.IOException e) {
 throw new com.google.protobuf.InvalidProtocolBufferException(
-e.getMessage()).setUnfinishedMessage(this);
+e).setUnfinishedMessage(this);
   } finally {
 this.unknownFields = unknownFields.build();
 makeExtensionsImmutable();
@@ -114,32 +113,16 @@ public final class ComparatorProtos {
   return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_Comparator_descriptor;
 }
 
-protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
 internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_Comparator_fieldAccessorTable

[10/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java
index 58a6cf5..40e4611 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java
@@ -6,7 +6,13 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class ProcedureProtos {
   private ProcedureProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
   /**
* Protobuf enum {@code hbase.pb.ProcedureState}
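
Before the enum hunks below, note the pattern the hunk above adds at the top of every regenerated file: registerAllExtensions gains an ExtensionRegistryLite overload, and the full-registry version simply widens its argument to the Lite type and delegates. Call sites are unchanged — a sketch, assuming a caller building a registry for parsing:

    // Works against either runtime; both overloads are no-ops for files without extensions.
    com.google.protobuf.ExtensionRegistry registry =
        com.google.protobuf.ExtensionRegistry.newInstance();
    ProcedureProtos.registerAllExtensions(registry); // routes through the Lite overload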
@@ -14,108 +20,118 @@ public final class ProcedureProtos {
   public enum ProcedureState
   implements com.google.protobuf.ProtocolMessageEnum {
 /**
- * INITIALIZING = 1;
- *
  * 
  * Procedure in construction, not yet added to the executor
  * 
+ *
+ * INITIALIZING = 1;
  */
-INITIALIZING(0, 1),
+INITIALIZING(1),
 /**
- * RUNNABLE = 2;
- *
  * 
  * Procedure added to the executor, and ready to be executed
  * 
+ *
+ * RUNNABLE = 2;
  */
-RUNNABLE(1, 2),
+RUNNABLE(2),
 /**
- * WAITING = 3;
- *
  * 
  * The procedure is waiting on children to be completed
  * 
+ *
+ * WAITING = 3;
  */
-WAITING(2, 3),
+WAITING(3),
 /**
- * WAITING_TIMEOUT = 4;
- *
  * 
   * The procedure is waiting on a timeout or an external event
  * 
+ *
+ * WAITING_TIMEOUT = 4;
  */
-WAITING_TIMEOUT(3, 4),
+WAITING_TIMEOUT(4),
 /**
- * ROLLEDBACK = 5;
- *
  * 
   * The procedure failed and was rolled back
  * 
+ *
+ * ROLLEDBACK = 5;
  */
-ROLLEDBACK(4, 5),
+ROLLEDBACK(5),
 /**
- * FINISHED = 6;
- *
  * 
   * The procedure execution is completed; it may need a rollback if it failed.
  * 
+ *
+ * FINISHED = 6;
  */
-FINISHED(5, 6),
+FINISHED(6),
 ;
 
 /**
- * INITIALIZING = 1;
- *
  * 
  * Procedure in construction, not yet added to the executor
  * 
+ *
+ * INITIALIZING = 1;
  */
 public static final int INITIALIZING_VALUE = 1;
 /**
- * RUNNABLE = 2;
- *
  * 
  * Procedure added to the executor, and ready to be executed
  * 
+ *
+ * RUNNABLE = 2;
  */
 public static final int RUNNABLE_VALUE = 2;
 /**
- * WAITING = 3;
- *
  * 
  * The procedure is waiting on children to be completed
  * 
+ *
+ * WAITING = 3;
  */
 public static final int WAITING_VALUE = 3;
 /**
- * WAITING_TIMEOUT = 4;
- *
  * 
   * The procedure is waiting on a timeout or an external event
  * 
+ *
+ * WAITING_TIMEOUT = 4;
  */
 public static final int WAITING_TIMEOUT_VALUE = 4;
 /**
- * ROLLEDBACK = 5;
- *
  * 
   * The procedure failed and was rolled back
  * 
+ *
+ * ROLLEDBACK = 5;
  */
 public static final int ROLLEDBACK_VALUE = 5;
 /**
- * FINISHED = 6;
- *
  * 
   * The procedure execution is completed; it may need a rollback if it failed.
  * 
+ *
+ * FINISHED = 6;
  */
 public static final int FINISHED_VALUE = 6;
 
 
-public final int getNumber() { return value; }
+public final int getNumber() {
+  return value;
+}
 
+/**
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+@java.lang.Deprecated
 public static ProcedureState valueOf(int value) {
+  return forNumber(value);
+}
+
+public static ProcedureState forNumber(int value) {
   switch (value) {
 case 1: return INITIALIZING;
 case 2: return RUNNABLE;
@@ -131,17 +147,17 @@ public final class ProcedureProtos {
 internalGetValueMap() {
   return internalValueMap;
 }
-private static com.google.protobuf.Internal.EnumLiteMap<ProcedureState>
-internalValueMap =
+private static final com.google.protobuf.Internal.EnumLiteMap<
+ProcedureState> internalValueMap =
  new com.google.protobuf.Internal.EnumLiteMap<ProcedureState>() {
 public ProcedureState findValueByNumber(int number) {
-  return ProcedureState.valueOf(number);
+  return ProcedureState.forNumber(number);
 }
   };
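
The enum hunks above drop the old (index, value) constructor pair and deprecate valueOf(int) in favor of forNumber(int); pb3 renamed the lookup, presumably because an int-taking valueOf is too easily confused with Java's built-in Enum.valueOf(String). A short usage sketch under that reading (ProcedureState stands in for any generated pb3 enum):

    // Resolve a wire number to an enum constant, pb3 style.
    ProcedureState s = ProcedureState.forNumber(3); // WAITING
    if (s == null) {
      // forNumber returns null rather than throwing for unknown numbers.
      s = ProcedureState.INITIALIZING;
    }
    int wire = s.getNumber(); // back to the proto enum number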
 

[22/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java
index 17f7dfb9..bd132af 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java
@@ -6,77 +6,75 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class ClusterIdProtos {
   private ClusterIdProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface ClusterIdOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface ClusterIdOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.ClusterId)
+  com.google.protobuf.MessageOrBuilder {
 
-// required string cluster_id = 1;
 /**
- * required string cluster_id = 1;
- *
  * 
  * This is the cluster id, a uuid as a String
  * 
+ *
+ * required string cluster_id = 1;
  */
 boolean hasClusterId();
 /**
- * required string cluster_id = 1;
- *
  * 
  * This is the cluster id, a uuid as a String
  * 
+ *
+ * required string cluster_id = 1;
  */
 java.lang.String getClusterId();
 /**
- * required string cluster_id = 1;
- *
  * 
  * This is the cluster id, a uuid as a String
  * 
+ *
+ * required string cluster_id = 1;
  */
 com.google.protobuf.ByteString
 getClusterIdBytes();
   }
   /**
-   * Protobuf type {@code hbase.pb.ClusterId}
-   *
* 
**
* Content of the '/hbase/hbaseid', cluster id, znode.
* Also content of the ${HBASE_ROOTDIR}/hbase.id file.
* 
+   *
+   * Protobuf type {@code hbase.pb.ClusterId}
*/
-  public static final class ClusterId extends
-  com.google.protobuf.GeneratedMessage
-  implements ClusterIdOrBuilder {
+  public  static final class ClusterId extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.ClusterId)
+  ClusterIdOrBuilder {
 // Use ClusterId.newBuilder() to construct.
-private ClusterId(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private ClusterId(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
-}
-private ClusterId(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final ClusterId defaultInstance;
-public static ClusterId getDefaultInstance() {
-  return defaultInstance;
 }
-
-public ClusterId getDefaultInstanceForType() {
-  return defaultInstance;
+private ClusterId() {
+  clusterId_ = "";
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private ClusterId(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   int mutable_bitField0_ = 0;
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -96,8 +94,9 @@ public final class ClusterIdProtos {
   break;
 }
 case 10: {
+  com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x0001;
-  clusterId_ = input.readBytes();
+  clusterId_ = bs;
   break;
 }
   }
@@ -106,7 +105,7 @@ public final class ClusterIdProtos {
 throw e.setUnfinishedMessage(this);
   } catch (java.io.IOException e) {
 throw new com.google.protobuf.InvalidProtocolBufferException(
-e.getMessage()).setUnfinishedMessage(this);
+e).setUnfinishedMessage(this);
   } finally {
 this.unknownFields = unknownFields.build();
 makeExtensionsImmutable();
@@ -117,48 +116,32 @@ public final class ClusterIdProtos {
   return 

[26/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestRpcServiceProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestRpcServiceProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestRpcServiceProtos.java
index 00a4c8d..86d12f3 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestRpcServiceProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestRpcServiceProtos.java
@@ -6,15 +6,21 @@ package org.apache.hadoop.hbase.shaded.ipc.protobuf.generated;
 public final class TestRpcServiceProtos {
   private TestRpcServiceProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
   /**
-   * Protobuf service {@code TestProtobufRpcProto}
-   *
* 
**
* A protobuf service for use in tests
* 
+   *
+   * Protobuf service {@code TestProtobufRpcProto}
*/
   public static abstract class TestProtobufRpcProto
   implements com.google.protobuf.Service {
@@ -534,7 +540,7 @@ public final class TestRpcServiceProtos {
   getDescriptor() {
 return descriptor;
   }
-  private static com.google.protobuf.Descriptors.FileDescriptor
+  private static  com.google.protobuf.Descriptors.FileDescriptor
   descriptor;
   static {
 java.lang.String[] descriptorData = {
@@ -550,18 +556,19 @@ public final class TestRpcServiceProtos {
   "edB\024TestRpcServiceProtos\210\001\001\240\001\001"
 };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
-      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
-public com.google.protobuf.ExtensionRegistry assignDescriptors(
-com.google.protobuf.Descriptors.FileDescriptor root) {
-  descriptor = root;
-  return null;
-}
-  };
+        new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+  public com.google.protobuf.ExtensionRegistry assignDescriptors(
+  com.google.protobuf.Descriptors.FileDescriptor root) {
+descriptor = root;
+return null;
+  }
+};
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.getDescriptor(),
        }, assigner);
+    org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.getDescriptor();
   }
 
   // @@protoc_insertion_point(outer_class_scope)
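
The assigner rewrite above is mostly indentation; the added trailing call is the substantive part: after building this file's descriptor, the generated static initializer now touches TestProtos.getDescriptor() so dependency descriptors get initialized as well. Once static init runs, the descriptors are available for reflection — a small sketch using the generated accessor:

    // List the services this descriptor file declares.
    com.google.protobuf.Descriptors.FileDescriptor fd =
        TestRpcServiceProtos.getDescriptor();
    for (com.google.protobuf.Descriptors.ServiceDescriptor sd : fd.getServices()) {
      System.out.println(sd.getFullName()); // e.g. TestProtobufRpcProto
    }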



[02/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java
index 9e2bd9c..d7b5221 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java
@@ -6,118 +6,115 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class ZooKeeperProtos {
   private ZooKeeperProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface MetaRegionServerOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface MetaRegionServerOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.MetaRegionServer)
+  com.google.protobuf.MessageOrBuilder {
 
-// required .hbase.pb.ServerName server = 1;
 /**
- * required .hbase.pb.ServerName server = 1;
- *
  * 
  * The ServerName hosting the meta region currently, or destination server,
  * if meta region is in transition.
  * 
+ *
+ * required .hbase.pb.ServerName server = 1;
  */
 boolean hasServer();
 /**
- * required .hbase.pb.ServerName server = 1;
- *
  * 
  * The ServerName hosting the meta region currently, or destination server,
  * if meta region is in transition.
  * 
+ *
+ * required .hbase.pb.ServerName server = 1;
  */
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer();
 /**
- * required .hbase.pb.ServerName server = 1;
- *
  * 
  * The ServerName hosting the meta region currently, or destination server,
  * if meta region is in transition.
  * 
+ *
+ * required .hbase.pb.ServerName server = 1;
  */
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder
 getServerOrBuilder();
 
-// optional uint32 rpc_version = 2;
 /**
- * optional uint32 rpc_version = 2;
- *
  * 
  * The major version of the rpc the server speaks.  This is used so that
  * clients connecting to the cluster can have prior knowledge of what version
  * to send to a RegionServer.  AsyncHBase will use this to detect versions.
  * 
+ *
+ * optional uint32 rpc_version = 2;
  */
 boolean hasRpcVersion();
 /**
- * optional uint32 rpc_version = 2;
- *
  * 
  * The major version of the rpc the server speaks.  This is used so that
  * clients connecting to the cluster can have prior knowledge of what version
  * to send to a RegionServer.  AsyncHBase will use this to detect versions.
  * 
+ *
+ * optional uint32 rpc_version = 2;
  */
 int getRpcVersion();
 
-// optional .hbase.pb.RegionState.State state = 3;
 /**
- * optional .hbase.pb.RegionState.State state = 3;
- *
  * 
  * State of the region transition. OPEN means fully operational 'hbase:meta'
  * 
+ *
+ * optional .hbase.pb.RegionState.State state = 3;
  */
 boolean hasState();
 /**
- * optional .hbase.pb.RegionState.State state = 3;
- *
  * 
  * State of the region transition. OPEN means fully operational 'hbase:meta'
  * 
+ *
+ * optional .hbase.pb.RegionState.State state = 3;
  */
 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State
 getState();
   }
   /**
-   * Protobuf type {@code hbase.pb.MetaRegionServer}
-   *
* 
**
* Content of the meta-region-server znode.
* 
+   *
+   * Protobuf type {@code hbase.pb.MetaRegionServer}
*/
-  public static final class MetaRegionServer extends
-  com.google.protobuf.GeneratedMessage
-  implements MetaRegionServerOrBuilder {
+  public  static final class MetaRegionServer extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.MetaRegionServer)
+  MetaRegionServerOrBuilder {
 // Use MetaRegionServer.newBuilder() to construct.
-private MetaRegionServer(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private MetaRegionServer(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
 

[11/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
index eeabfb0..4426013 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
@@ -6,7 +6,13 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class MasterProtos {
   private MasterProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
   /**
* Protobuf enum {@code hbase.pb.MasterSwitchType}
@@ -16,11 +22,11 @@ public final class MasterProtos {
 /**
  * SPLIT = 0;
  */
-SPLIT(0, 0),
+SPLIT(0),
 /**
  * MERGE = 1;
  */
-MERGE(1, 1),
+MERGE(1),
 ;
 
 /**
@@ -33,9 +39,19 @@ public final class MasterProtos {
 public static final int MERGE_VALUE = 1;
 
 
-public final int getNumber() { return value; }
+public final int getNumber() {
+  return value;
+}
 
+/**
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+@java.lang.Deprecated
 public static MasterSwitchType valueOf(int value) {
+  return forNumber(value);
+}
+
+public static MasterSwitchType forNumber(int value) {
   switch (value) {
 case 0: return SPLIT;
 case 1: return MERGE;
@@ -47,17 +63,17 @@ public final class MasterProtos {
 internalGetValueMap() {
   return internalValueMap;
 }
-private static com.google.protobuf.Internal.EnumLiteMap<MasterSwitchType>
-internalValueMap =
+private static final com.google.protobuf.Internal.EnumLiteMap<
+MasterSwitchType> internalValueMap =
  new com.google.protobuf.Internal.EnumLiteMap<MasterSwitchType>() {
 public MasterSwitchType findValueByNumber(int number) {
-  return MasterSwitchType.valueOf(number);
+  return MasterSwitchType.forNumber(number);
 }
   };
 
 public final com.google.protobuf.Descriptors.EnumValueDescriptor
 getValueDescriptor() {
-  return getDescriptor().getValues().get(index);
+  return getDescriptor().getValues().get(ordinal());
 }
 public final com.google.protobuf.Descriptors.EnumDescriptor
 getDescriptorForType() {
@@ -79,21 +95,19 @@ public final class MasterProtos {
   return VALUES[desc.getIndex()];
 }
 
-private final int index;
 private final int value;
 
-private MasterSwitchType(int index, int value) {
-  this.index = index;
+private MasterSwitchType(int value) {
   this.value = value;
 }
 
 // @@protoc_insertion_point(enum_scope:hbase.pb.MasterSwitchType)
   }
 
-  public interface AddColumnRequestOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface AddColumnRequestOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.AddColumnRequest)
+  com.google.protobuf.MessageOrBuilder {
 
-// required .hbase.pb.TableName table_name = 1;
 /**
  * required .hbase.pb.TableName table_name = 1;
  */
@@ -107,7 +121,6 @@ public final class MasterProtos {
  */
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder
 getTableNameOrBuilder();
 
-// required .hbase.pb.ColumnFamilySchema column_families = 2;
 /**
  * required .hbase.pb.ColumnFamilySchema column_families = 2;
  */
@@ -121,7 +134,6 @@ public final class MasterProtos {
  */
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder
 getColumnFamiliesOrBuilder();
 
-// optional uint64 nonce_group = 3 [default = 0];
 /**
  * optional uint64 nonce_group = 3 [default = 0];
  */
@@ -131,7 +143,6 @@ public final class MasterProtos {
  */
 long getNonceGroup();
 
-// optional uint64 nonce = 4 [default = 0];
 /**
  * optional uint64 nonce = 4 [default = 0];
  */
@@ -144,36 +155,29 @@ public final class MasterProtos {
   /**
* Protobuf type {@code hbase.pb.AddColumnRequest}
*/
-  public static final class AddColumnRequest extends
-  com.google.protobuf.GeneratedMessage
-  implements AddColumnRequestOrBuilder {
+  public  static final class AddColumnRequest extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // 

[12/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
index a758109..bc8d40b 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
@@ -6,7 +6,13 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class MasterProcedureProtos {
   private MasterProcedureProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
   /**
* Protobuf enum {@code hbase.pb.CreateTableState}
@@ -16,27 +22,27 @@ public final class MasterProcedureProtos {
 /**
  * CREATE_TABLE_PRE_OPERATION = 1;
  */
-CREATE_TABLE_PRE_OPERATION(0, 1),
+CREATE_TABLE_PRE_OPERATION(1),
 /**
  * CREATE_TABLE_WRITE_FS_LAYOUT = 2;
  */
-CREATE_TABLE_WRITE_FS_LAYOUT(1, 2),
+CREATE_TABLE_WRITE_FS_LAYOUT(2),
 /**
  * CREATE_TABLE_ADD_TO_META = 3;
  */
-CREATE_TABLE_ADD_TO_META(2, 3),
+CREATE_TABLE_ADD_TO_META(3),
 /**
  * CREATE_TABLE_ASSIGN_REGIONS = 4;
  */
-CREATE_TABLE_ASSIGN_REGIONS(3, 4),
+CREATE_TABLE_ASSIGN_REGIONS(4),
 /**
  * CREATE_TABLE_UPDATE_DESC_CACHE = 5;
  */
-CREATE_TABLE_UPDATE_DESC_CACHE(4, 5),
+CREATE_TABLE_UPDATE_DESC_CACHE(5),
 /**
  * CREATE_TABLE_POST_OPERATION = 6;
  */
-CREATE_TABLE_POST_OPERATION(5, 6),
+CREATE_TABLE_POST_OPERATION(6),
 ;
 
 /**
@@ -65,9 +71,19 @@ public final class MasterProcedureProtos {
 public static final int CREATE_TABLE_POST_OPERATION_VALUE = 6;
 
 
-public final int getNumber() { return value; }
+public final int getNumber() {
+  return value;
+}
 
+/**
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+@java.lang.Deprecated
 public static CreateTableState valueOf(int value) {
+  return forNumber(value);
+}
+
+public static CreateTableState forNumber(int value) {
   switch (value) {
 case 1: return CREATE_TABLE_PRE_OPERATION;
 case 2: return CREATE_TABLE_WRITE_FS_LAYOUT;
@@ -83,17 +99,17 @@ public final class MasterProcedureProtos {
 internalGetValueMap() {
   return internalValueMap;
 }
-private static com.google.protobuf.Internal.EnumLiteMap<CreateTableState>
-internalValueMap =
+private static final com.google.protobuf.Internal.EnumLiteMap<
+CreateTableState> internalValueMap =
  new com.google.protobuf.Internal.EnumLiteMap<CreateTableState>() {
 public CreateTableState findValueByNumber(int number) {
-  return CreateTableState.valueOf(number);
+  return CreateTableState.forNumber(number);
 }
   };
 
 public final com.google.protobuf.Descriptors.EnumValueDescriptor
 getValueDescriptor() {
-  return getDescriptor().getValues().get(index);
+  return getDescriptor().getValues().get(ordinal());
 }
 public final com.google.protobuf.Descriptors.EnumDescriptor
 getDescriptorForType() {
@@ -115,11 +131,9 @@ public final class MasterProcedureProtos {
   return VALUES[desc.getIndex()];
 }
 
-private final int index;
 private final int value;
 
-private CreateTableState(int index, int value) {
-  this.index = index;
+private CreateTableState(int value) {
   this.value = value;
 }
 
@@ -134,31 +148,31 @@ public final class MasterProcedureProtos {
 /**
  * MODIFY_TABLE_PREPARE = 1;
  */
-MODIFY_TABLE_PREPARE(0, 1),
+MODIFY_TABLE_PREPARE(1),
 /**
  * MODIFY_TABLE_PRE_OPERATION = 2;
  */
-MODIFY_TABLE_PRE_OPERATION(1, 2),
+MODIFY_TABLE_PRE_OPERATION(2),
 /**
  * MODIFY_TABLE_UPDATE_TABLE_DESCRIPTOR = 3;
  */
-MODIFY_TABLE_UPDATE_TABLE_DESCRIPTOR(2, 3),
+MODIFY_TABLE_UPDATE_TABLE_DESCRIPTOR(3),
 /**
  * MODIFY_TABLE_REMOVE_REPLICA_COLUMN = 4;
  */
-MODIFY_TABLE_REMOVE_REPLICA_COLUMN(3, 4),
+MODIFY_TABLE_REMOVE_REPLICA_COLUMN(4),
 /**
  * MODIFY_TABLE_DELETE_FS_LAYOUT = 5;
  */
-MODIFY_TABLE_DELETE_FS_LAYOUT(4, 5),
+MODIFY_TABLE_DELETE_FS_LAYOUT(5),
 /**
  * MODIFY_TABLE_POST_OPERATION = 6;
  */
-MODIFY_TABLE_POST_OPERATION(5, 6),
+

[15/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
index 1259d3c..2ae9f9c 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
@@ -6,45 +6,51 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class HBaseProtos {
   private HBaseProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
   /**
-   * Protobuf enum {@code hbase.pb.CompareType}
-   *
* 
* Comparison operators 
* 
+   *
+   * Protobuf enum {@code hbase.pb.CompareType}
*/
   public enum CompareType
   implements com.google.protobuf.ProtocolMessageEnum {
 /**
  * LESS = 0;
  */
-LESS(0, 0),
+LESS(0),
 /**
  * LESS_OR_EQUAL = 1;
  */
-LESS_OR_EQUAL(1, 1),
+LESS_OR_EQUAL(1),
 /**
  * EQUAL = 2;
  */
-EQUAL(2, 2),
+EQUAL(2),
 /**
  * NOT_EQUAL = 3;
  */
-NOT_EQUAL(3, 3),
+NOT_EQUAL(3),
 /**
  * GREATER_OR_EQUAL = 4;
  */
-GREATER_OR_EQUAL(4, 4),
+GREATER_OR_EQUAL(4),
 /**
  * GREATER = 5;
  */
-GREATER(5, 5),
+GREATER(5),
 /**
  * NO_OP = 6;
  */
-NO_OP(6, 6),
+NO_OP(6),
 ;
 
 /**
@@ -77,9 +83,19 @@ public final class HBaseProtos {
 public static final int NO_OP_VALUE = 6;
 
 
-public final int getNumber() { return value; }
+public final int getNumber() {
+  return value;
+}
 
+/**
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+@java.lang.Deprecated
 public static CompareType valueOf(int value) {
+  return forNumber(value);
+}
+
+public static CompareType forNumber(int value) {
   switch (value) {
 case 0: return LESS;
 case 1: return LESS_OR_EQUAL;
@@ -96,17 +112,17 @@ public final class HBaseProtos {
 internalGetValueMap() {
   return internalValueMap;
 }
-private static com.google.protobuf.Internal.EnumLiteMap<CompareType>
-internalValueMap =
+private static final com.google.protobuf.Internal.EnumLiteMap<
+CompareType> internalValueMap =
  new com.google.protobuf.Internal.EnumLiteMap<CompareType>() {
 public CompareType findValueByNumber(int number) {
-  return CompareType.valueOf(number);
+  return CompareType.forNumber(number);
 }
   };
 
 public final com.google.protobuf.Descriptors.EnumValueDescriptor
 getValueDescriptor() {
-  return getDescriptor().getValues().get(index);
+  return getDescriptor().getValues().get(ordinal());
 }
 public final com.google.protobuf.Descriptors.EnumDescriptor
 getDescriptorForType() {
@@ -128,11 +144,9 @@ public final class HBaseProtos {
   return VALUES[desc.getIndex()];
 }
 
-private final int index;
 private final int value;
 
-private CompareType(int index, int value) {
-  this.index = index;
+private CompareType(int value) {
   this.value = value;
 }
 
@@ -147,31 +161,31 @@ public final class HBaseProtos {
 /**
  * NANOSECONDS = 1;
  */
-NANOSECONDS(0, 1),
+NANOSECONDS(1),
 /**
  * MICROSECONDS = 2;
  */
-MICROSECONDS(1, 2),
+MICROSECONDS(2),
 /**
  * MILLISECONDS = 3;
  */
-MILLISECONDS(2, 3),
+MILLISECONDS(3),
 /**
  * SECONDS = 4;
  */
-SECONDS(3, 4),
+SECONDS(4),
 /**
  * MINUTES = 5;
  */
-MINUTES(4, 5),
+MINUTES(5),
 /**
  * HOURS = 6;
  */
-HOURS(5, 6),
+HOURS(6),
 /**
  * DAYS = 7;
  */
-DAYS(6, 7),
+DAYS(7),
 ;
 
 /**
@@ -204,9 +218,19 @@ public final class HBaseProtos {
 public static final int DAYS_VALUE = 7;
 
 
-public final int getNumber() { return value; }
+public final int getNumber() {
+  return value;
+}
 
+/**
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+@java.lang.Deprecated
 public static TimeUnit valueOf(int value) {
+  return forNumber(value);
+}
+
+public static TimeUnit forNumber(int value) {
   switch (value) {
 case 1: return NANOSECONDS;
 case 2: return MICROSECONDS;
@@ -223,17 +247,17 @@ public 

[01/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/HBASE-16264 17d4b70df -> 35cf55048


http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/util/ByteStringer.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/util/ByteStringer.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/util/ByteStringer.java
index 4ccf154..38cc0bd 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/util/ByteStringer.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/util/ByteStringer.java
@@ -17,37 +17,17 @@
  */
 package org.apache.hadoop.hbase.shaded.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
 import com.google.protobuf.ByteString;
-import com.google.protobuf.HBaseZeroCopyByteString;
+import com.google.protobuf.UnsafeByteOperations;
 
 /**
 * Hack to work around the HBASE-10304 issue that keeps bubbling up when in a mapreduce context.
  */
+// Depends on protobuf-3.1.0 feature.
 @InterfaceAudience.Private
 public class ByteStringer {
-  private static final Log LOG = LogFactory.getLog(ByteStringer.class);
-
-  /**
-   * Flag set at class loading time.
-   */
-  private static boolean USE_ZEROCOPYBYTESTRING = true;
-
-  // Can I classload HBaseZeroCopyByteString without IllegalAccessError?
-  // If we can, use it passing ByteStrings to pb else use native ByteString though more costly
-  // because it makes a copy of the passed in array.
-  static {
-try {
-  HBaseZeroCopyByteString.wrap(new byte [0]);
-} catch (IllegalAccessError iae) {
-  USE_ZEROCOPYBYTESTRING = false;
-  LOG.debug("Failed to classload HBaseZeroCopyByteString: " + 
iae.toString());
-}
-  }
-
   private ByteStringer() {
 super();
   }
@@ -56,14 +36,13 @@ public class ByteStringer {
* Wraps a byte array in a {@link ByteString} without copying it.
*/
   public static ByteString wrap(final byte[] array) {
-    return USE_ZEROCOPYBYTESTRING? HBaseZeroCopyByteString.wrap(array): ByteString.copyFrom(array);
+return UnsafeByteOperations.unsafeWrap(array);
   }
 
   /**
* Wraps a subset of a byte array in a {@link ByteString} without copying it.
*/
   public static ByteString wrap(final byte[] array, int offset, int length) {
-    return USE_ZEROCOPYBYTESTRING? HBaseZeroCopyByteString.wrap(array, offset, length):
-  ByteString.copyFrom(array, offset, length);
+return UnsafeByteOperations.unsafeWrap(array, offset, length);
   }
 }
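
With UnsafeByteOperations in place, the old classload probe is gone and both wrap overloads are straight zero-copy calls. The trade-off, per the unsafeWrap contract, is that the returned ByteString aliases the caller's array, so the array must not be mutated afterwards. A short usage sketch (the byte values are illustrative):

    byte[] row = new byte[] { 'r', '1' };
    // Zero-copy: bs shares row's backing array; treat row as immutable from here on.
    com.google.protobuf.ByteString bs = ByteStringer.wrap(row);
    com.google.protobuf.ByteString first = ByteStringer.wrap(row, 0, 1);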



[13/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java
index ee901f0..b5abf42 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java
@@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class MapReduceProtos {
   private MapReduceProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface ScanMetricsOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface ScanMetricsOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.ScanMetrics)
+  com.google.protobuf.MessageOrBuilder {
 
-// repeated .hbase.pb.NameInt64Pair metrics = 1;
 /**
  * repeated .hbase.pb.NameInt64Pair metrics = 1;
  */
@@ -39,36 +45,28 @@ public final class MapReduceProtos {
   /**
* Protobuf type {@code hbase.pb.ScanMetrics}
*/
-  public static final class ScanMetrics extends
-  com.google.protobuf.GeneratedMessage
-  implements ScanMetricsOrBuilder {
+  public  static final class ScanMetrics extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.ScanMetrics)
+  ScanMetricsOrBuilder {
 // Use ScanMetrics.newBuilder() to construct.
-private ScanMetrics(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private ScanMetrics(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
-}
-private ScanMetrics(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final ScanMetrics defaultInstance;
-public static ScanMetrics getDefaultInstance() {
-  return defaultInstance;
 }
-
-public ScanMetrics getDefaultInstanceForType() {
-  return defaultInstance;
+private ScanMetrics() {
+  metrics_ = java.util.Collections.emptyList();
 }
 
-private final com.google.protobuf.UnknownFieldSet unknownFields;
 @java.lang.Override
 public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
+getUnknownFields() {
   return this.unknownFields;
 }
 private ScanMetrics(
 com.google.protobuf.CodedInputStream input,
 com.google.protobuf.ExtensionRegistryLite extensionRegistry)
 throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
+  this();
   int mutable_bitField0_ = 0;
   com.google.protobuf.UnknownFieldSet.Builder unknownFields =
   com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -92,7 +90,8 @@ public final class MapReduceProtos {
                metrics_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair>();
                mutable_bitField0_ |= 0x0001;
              }
-              metrics_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.PARSER, extensionRegistry));
+              metrics_.add(
+                  input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.PARSER, extensionRegistry));
   break;
 }
   }
@@ -101,7 +100,7 @@ public final class MapReduceProtos {
 throw e.setUnfinishedMessage(this);
   } catch (java.io.IOException e) {
 throw new com.google.protobuf.InvalidProtocolBufferException(
-e.getMessage()).setUnfinishedMessage(this);
+e).setUnfinishedMessage(this);
   } finally {
 if (((mutable_bitField0_ & 0x0001) == 0x0001)) {
   metrics_ = java.util.Collections.unmodifiableList(metrics_);
@@ -115,29 +114,13 @@ public final class MapReduceProtos {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_ScanMetrics_descriptor;
 }
 
-protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
 internalGetFieldAccessorTable() {
   return 

[06/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java
index 63553af..540df5a 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java
@@ -6,88 +6,91 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class RegionServerStatusProtos {
   private RegionServerStatusProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
-  public interface RegionServerStartupRequestOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
+  public interface RegionServerStartupRequestOrBuilder extends
+  // @@protoc_insertion_point(interface_extends:hbase.pb.RegionServerStartupRequest)
+  com.google.protobuf.MessageOrBuilder {
 
-// required uint32 port = 1;
 /**
- * required uint32 port = 1;
- *
  * 
  ** Port number this regionserver is up on 
  * 
+ *
+ * required uint32 port = 1;
  */
 boolean hasPort();
 /**
- * required uint32 port = 1;
- *
  * 
  ** Port number this regionserver is up on 
  * 
+ *
+ * required uint32 port = 1;
  */
 int getPort();
 
-// required uint64 server_start_code = 2;
 /**
- * required uint64 server_start_code = 2;
- *
  * 
  ** This servers' startcode 
  * 
+ *
+ * required uint64 server_start_code = 2;
  */
 boolean hasServerStartCode();
 /**
- * required uint64 server_start_code = 2;
- *
  * 
  ** This servers' startcode 
  * 
+ *
+ * required uint64 server_start_code = 2;
  */
 long getServerStartCode();
 
-// required uint64 server_current_time = 3;
 /**
- * required uint64 server_current_time = 3;
- *
  * 
  ** Current time of the region server in ms 
  * 
+ *
+ * required uint64 server_current_time = 3;
  */
 boolean hasServerCurrentTime();
 /**
- * required uint64 server_current_time = 3;
- *
  * 
  ** Current time of the region server in ms 
  * 
+ *
+ * required uint64 server_current_time = 3;
  */
 long getServerCurrentTime();
 
-// optional string use_this_hostname_instead = 4;
 /**
- * optional string use_this_hostname_instead = 4;
- *
  * 
  ** hostname for region server, optional 
  * 
+ *
+ * optional string use_this_hostname_instead = 4;
  */
 boolean hasUseThisHostnameInstead();
 /**
- * optional string use_this_hostname_instead = 4;
- *
  * 
  ** hostname for region server, optional 
  * 
+ *
+ * optional string use_this_hostname_instead = 4;
  */
 java.lang.String getUseThisHostnameInstead();
 /**
- * optional string use_this_hostname_instead = 4;
- *
  * 
  ** hostname for region server, optional 
  * 
+ *
+ * optional string use_this_hostname_instead = 4;
  */
 com.google.protobuf.ByteString
 getUseThisHostnameInsteadBytes();
@@ -95,36 +98,31 @@ public final class RegionServerStatusProtos {
   /**
* Protobuf type {@code hbase.pb.RegionServerStartupRequest}
*/
-  public static final class RegionServerStartupRequest extends
-  com.google.protobuf.GeneratedMessage
-  implements RegionServerStartupRequestOrBuilder {
+  public  static final class RegionServerStartupRequest extends
+  com.google.protobuf.GeneratedMessageV3 implements
+  // @@protoc_insertion_point(message_implements:hbase.pb.RegionServerStartupRequest)
+  RegionServerStartupRequestOrBuilder {
 // Use RegionServerStartupRequest.newBuilder() to construct.
-private RegionServerStartupRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+private RegionServerStartupRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
   super(builder);
-  this.unknownFields = builder.getUnknownFields();
-}
-private RegionServerStartupRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final RegionServerStartupRequest defaultInstance;
-public static 

[09/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteStri

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
index da4f9d4..d31e338 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
@@ -6,7 +6,13 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class QuotaProtos {
   private QuotaProtos() {}
   public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
+registerAllExtensions(
+(com.google.protobuf.ExtensionRegistryLite) registry);
   }
   /**
* Protobuf enum {@code hbase.pb.QuotaScope}
@@ -16,11 +22,11 @@ public final class QuotaProtos {
 /**
  * CLUSTER = 1;
  */
-CLUSTER(0, 1),
+CLUSTER(1),
 /**
  * MACHINE = 2;
  */
-MACHINE(1, 2),
+MACHINE(2),
 ;
 
 /**
@@ -33,9 +39,19 @@ public final class QuotaProtos {
 public static final int MACHINE_VALUE = 2;
 
 
-public final int getNumber() { return value; }
+public final int getNumber() {
+  return value;
+}
 
+/**
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+@java.lang.Deprecated
 public static QuotaScope valueOf(int value) {
+  return forNumber(value);
+}
+
+public static QuotaScope forNumber(int value) {
   switch (value) {
 case 1: return CLUSTER;
 case 2: return MACHINE;
@@ -47,17 +63,17 @@ public final class QuotaProtos {
 internalGetValueMap() {
   return internalValueMap;
 }
-private static com.google.protobuf.Internal.EnumLiteMap
-internalValueMap =
+private static final com.google.protobuf.Internal.EnumLiteMap<
+QuotaScope> internalValueMap =
   new com.google.protobuf.Internal.EnumLiteMap() {
 public QuotaScope findValueByNumber(int number) {
-  return QuotaScope.valueOf(number);
+  return QuotaScope.forNumber(number);
 }
   };
 
 public final com.google.protobuf.Descriptors.EnumValueDescriptor
 getValueDescriptor() {
-  return getDescriptor().getValues().get(index);
+  return getDescriptor().getValues().get(ordinal());
 }
 public final com.google.protobuf.Descriptors.EnumDescriptor
 getDescriptorForType() {
@@ -79,11 +95,9 @@ public final class QuotaProtos {
   return VALUES[desc.getIndex()];
 }
 
-private final int index;
 private final int value;
 
-private QuotaScope(int index, int value) {
-  this.index = index;
+private QuotaScope(int value) {
   this.value = value;
 }
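
For reference, a minimal usage sketch of the pb3 enum API regenerated above (illustrative only, not part of the patch; QuotaScope and its constants are the real generated names):

    import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope;

    public class QuotaScopeLookup {
      public static void main(String[] args) {
        // pb3 style: forNumber(int) is the supported lookup; it returns null
        // (rather than throwing) for wire values this build does not know.
        QuotaScope scope = QuotaScope.forNumber(QuotaScope.CLUSTER_VALUE);
        System.out.println(scope);                     // CLUSTER
        System.out.println(QuotaScope.forNumber(99));  // null

        // The pb2-style valueOf(int) survives only as a deprecated alias.
        @SuppressWarnings("deprecation")
        QuotaScope legacy = QuotaScope.valueOf(QuotaScope.MACHINE_VALUE);
        System.out.println(legacy);                    // MACHINE
      }
    }

Note also, per the getValueDescriptor() hunk just above, the descriptor index is now derived from ordinal() instead of a stored field, which is why the (index, value) constructor collapses to (value).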
 
@@ -98,27 +112,27 @@ public final class QuotaProtos {
 /**
  * REQUEST_NUMBER = 1;
  */
-REQUEST_NUMBER(0, 1),
+REQUEST_NUMBER(1),
 /**
  * REQUEST_SIZE = 2;
  */
-REQUEST_SIZE(1, 2),
+REQUEST_SIZE(2),
 /**
  * WRITE_NUMBER = 3;
  */
-WRITE_NUMBER(2, 3),
+WRITE_NUMBER(3),
 /**
  * WRITE_SIZE = 4;
  */
-WRITE_SIZE(3, 4),
+WRITE_SIZE(4),
 /**
  * READ_NUMBER = 5;
  */
-READ_NUMBER(4, 5),
+READ_NUMBER(5),
 /**
  * READ_SIZE = 6;
  */
-READ_SIZE(5, 6),
+READ_SIZE(6),
 ;
 
 /**
@@ -147,9 +161,19 @@ public final class QuotaProtos {
 public static final int READ_SIZE_VALUE = 6;
 
 
-public final int getNumber() { return value; }
+public final int getNumber() {
+  return value;
+}
 
+/**
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+@java.lang.Deprecated
 public static ThrottleType valueOf(int value) {
+  return forNumber(value);
+}
+
+public static ThrottleType forNumber(int value) {
   switch (value) {
 case 1: return REQUEST_NUMBER;
 case 2: return REQUEST_SIZE;
@@ -165,17 +189,17 @@ public final class QuotaProtos {
 internalGetValueMap() {
   return internalValueMap;
 }
-private static com.google.protobuf.Internal.EnumLiteMap
-internalValueMap =
+private static final com.google.protobuf.Internal.EnumLiteMap<
+ThrottleType> internalValueMap =
   new com.google.protobuf.Internal.EnumLiteMap() {
 public ThrottleType findValueByNumber(int number) {
-  return ThrottleType.valueOf(number);
+  return ThrottleType.forNumber(number);
 }
   };
 
 public final 

[2/3] hbase git commit: HBASE-16721 Concurrency issue in WAL unflushed seqId tracking

2016-09-29 Thread enis
HBASE-16721 Concurrency issue in WAL unflushed seqId tracking


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f77f1530
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f77f1530
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f77f1530

Branch: refs/heads/branch-1.3
Commit: f77f1530d4cebd1679bc1c27782bc283638dbd5f
Parents: 728f58a
Author: Enis Soztutar 
Authored: Thu Sep 29 13:50:58 2016 -0700
Committer: Enis Soztutar 
Committed: Thu Sep 29 14:53:29 2016 -0700

--
 .../hadoop/hbase/regionserver/HRegion.java  |  19 ++--
 .../java/org/apache/hadoop/hbase/wal/WAL.java   |   2 +-
 .../hbase/regionserver/wal/TestFSHLog.java  | 101 ++-
 3 files changed, 110 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f77f1530/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index d43e838..520286f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -2293,6 +2293,15 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 
 long trxId = 0;
 MultiVersionConcurrencyControl.WriteEntry writeEntry = mvcc.begin();
+// wait for all in-progress transactions to commit to WAL before
+// we can start the flush. This prevents
+// uncommitted transactions from being written into HFiles.
+// We have to block before we start the flush, otherwise keys that
+// were removed via a rollbackMemstore could be written to Hfiles.
+mvcc.completeAndWait(writeEntry);
+// set writeEntry to null to prevent mvcc.complete from being called again inside finally
+// block
+writeEntry = null;
 try {
   try {
 if (wal != null) {
@@ -2371,16 +2380,6 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   throw ioe;
 }
   }
-
-  // wait for all in-progress transactions to commit to WAL before
-  // we can start the flush. This prevents
-  // uncommitted transactions from being written into HFiles.
-  // We have to block before we start the flush, otherwise keys that
-  // were removed via a rollbackMemstore could be written to Hfiles.
-  mvcc.completeAndWait(writeEntry);
-  // set writeEntry to null to prevent mvcc.complete from being called again inside finally
-  // block
-  writeEntry = null;
 } finally {
   if (writeEntry != null) {
 // In case of failure just mark current writeEntry as complete.
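
The hunk above moves the completeAndWait call from after the WAL bookkeeping to directly after mvcc.begin(), before the flush starts. A minimal sketch of the resulting ordering (condensed; MultiVersionConcurrencyControl and its begin/completeAndWait/complete are the real API, while flushMemstoreToHFiles() is a hypothetical stand-in for the flush body):

    import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;

    class FlushOrderingSketch {
      private final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();

      void prepareFlush() {
        MultiVersionConcurrencyControl.WriteEntry writeEntry = mvcc.begin();
        // Block until every in-flight transaction has committed to the WAL *before*
        // the flush starts, so edits removed by rollbackMemstore cannot reach an HFile.
        mvcc.completeAndWait(writeEntry);
        // Null the reference so the finally block cannot complete the entry twice.
        writeEntry = null;
        try {
          flushMemstoreToHFiles();
        } finally {
          if (writeEntry != null) {
            mvcc.complete(writeEntry);  // unreachable here; kept to mirror the patch
          }
        }
      }

      private void flushMemstoreToHFiles() { /* hypothetical flush body */ }
    }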

http://git-wip-us.apache.org/repos/asf/hbase/blob/f77f1530/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
index d2b336e..041a5b9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
@@ -47,7 +47,7 @@ import com.google.common.annotations.VisibleForTesting;
  */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
-public interface WAL {
+public interface WAL extends AutoCloseable {
 
   /**
* Registers WALActionsListener

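The one-line change above, WAL extends AutoCloseable, is what enables try-with-resources over a log handle. A minimal sketch (getWAL() is a hypothetical stand-in for however the caller obtains a WAL, e.g. from a WALFactory; sync() is the real interface method):

    // Illustrative fragment, assuming a getWAL() provider.
    try (WAL wal = getWAL()) {
      wal.sync();  // use the log inside the scope; close() now runs automatically
    } catch (Exception e) {
      // AutoCloseable.close() is declared to throw Exception, so callers handle it.
      throw new RuntimeException(e);
    }
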
http://git-wip-us.apache.org/repos/asf/hbase/blob/f77f1530/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
index 6ece700..760cdc1 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
@@ -30,6 +30,10 @@ import java.util.Comparator;
 import java.util.List;
 import java.util.Set;
 import java.util.UUID;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.commons.lang.mutable.MutableBoolean;
 import org.apache.commons.logging.Log;
@@ -55,6 +59,7 @@ import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import 

[1/3] hbase git commit: HBASE-16721 Concurrency issue in WAL unflushed seqId tracking

2016-09-29 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/branch-1 5ac2776d2 -> bf5a7aba5
  refs/heads/branch-1.2 42dff8a58 -> cf374af10
  refs/heads/branch-1.3 728f58ad5 -> f77f1530d


HBASE-16721 Concurrency issue in WAL unflushed seqId tracking


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bf5a7aba
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bf5a7aba
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bf5a7aba

Branch: refs/heads/branch-1
Commit: bf5a7aba5c0c83874f52cbd775dd280cb4a1cd49
Parents: 5ac2776
Author: Enis Soztutar 
Authored: Thu Sep 29 13:50:58 2016 -0700
Committer: Enis Soztutar 
Committed: Thu Sep 29 14:51:14 2016 -0700

--
 .../hadoop/hbase/regionserver/HRegion.java  |  19 ++--
 .../java/org/apache/hadoop/hbase/wal/WAL.java   |   2 +-
 .../hbase/regionserver/wal/TestFSHLog.java  | 101 ++-
 3 files changed, 110 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bf5a7aba/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index bfb9171..d06ed08 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -2294,6 +2294,15 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 
 long trxId = 0;
 MultiVersionConcurrencyControl.WriteEntry writeEntry = mvcc.begin();
+// wait for all in-progress transactions to commit to WAL before
+// we can start the flush. This prevents
+// uncommitted transactions from being written into HFiles.
+// We have to block before we start the flush, otherwise keys that
+// were removed via a rollbackMemstore could be written to Hfiles.
+mvcc.completeAndWait(writeEntry);
+// set writeEntry to null to prevent mvcc.complete from being called again inside finally
+// block
+writeEntry = null;
 try {
   try {
 if (wal != null) {
@@ -2372,16 +2381,6 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   throw ioe;
 }
   }
-
-  // wait for all in-progress transactions to commit to WAL before
-  // we can start the flush. This prevents
-  // uncommitted transactions from being written into HFiles.
-  // We have to block before we start the flush, otherwise keys that
-  // were removed via a rollbackMemstore could be written to Hfiles.
-  mvcc.completeAndWait(writeEntry);
-  // set writeEntry to null to prevent mvcc.complete from being called again inside finally
-  // block
-  writeEntry = null;
 } finally {
   if (writeEntry != null) {
 // In case of failure just mark current writeEntry as complete.

http://git-wip-us.apache.org/repos/asf/hbase/blob/bf5a7aba/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
index 76b19f0..e43d44f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
@@ -51,7 +51,7 @@ import com.google.common.annotations.VisibleForTesting;
  */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
-public interface WAL {
+public interface WAL extends AutoCloseable {
 
   /**
* Registers WALActionsListener

http://git-wip-us.apache.org/repos/asf/hbase/blob/bf5a7aba/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
index 6ece700..760cdc1 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
@@ -30,6 +30,10 @@ import java.util.Comparator;
 import java.util.List;
 import java.util.Set;
 import java.util.UUID;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.commons.lang.mutable.MutableBoolean;

[3/3] hbase git commit: HBASE-16721 Concurrency issue in WAL unflushed seqId tracking

2016-09-29 Thread enis
HBASE-16721 Concurrency issue in WAL unflushed seqId tracking


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cf374af1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cf374af1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cf374af1

Branch: refs/heads/branch-1.2
Commit: cf374af102f139a6176d05b97201bfa8d9f687be
Parents: 42dff8a
Author: Enis Soztutar 
Authored: Thu Sep 29 13:50:58 2016 -0700
Committer: Enis Soztutar 
Committed: Thu Sep 29 14:55:45 2016 -0700

--
 .../hadoop/hbase/regionserver/HRegion.java  |  19 ++--
 .../java/org/apache/hadoop/hbase/wal/WAL.java   |   2 +-
 .../hbase/regionserver/wal/TestFSHLog.java  | 100 ++-
 3 files changed, 109 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cf374af1/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 22c66e3..f93b5a1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -2216,6 +2216,15 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 
 long trxId = 0;
 MultiVersionConcurrencyControl.WriteEntry writeEntry = mvcc.begin();
+// wait for all in-progress transactions to commit to WAL before
+// we can start the flush. This prevents
+// uncommitted transactions from being written into HFiles.
+// We have to block before we start the flush, otherwise keys that
+// were removed via a rollbackMemstore could be written to Hfiles.
+mvcc.completeAndWait(writeEntry);
+// set writeEntry to null to prevent mvcc.complete from being called again inside finally
+// block
+writeEntry = null;
 try {
   try {
 if (wal != null) {
@@ -2294,16 +2303,6 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   throw ioe;
 }
   }
-
-  // wait for all in-progress transactions to commit to WAL before
-  // we can start the flush. This prevents
-  // uncommitted transactions from being written into HFiles.
-  // We have to block before we start the flush, otherwise keys that
-  // were removed via a rollbackMemstore could be written to Hfiles.
-  mvcc.completeAndWait(writeEntry);
-  // set writeEntry to null to prevent mvcc.complete from being called again inside finally
-  // block
-  writeEntry = null;
 } finally {
   if (writeEntry != null) {
 // In case of failure just mark current writeEntry as complete.

http://git-wip-us.apache.org/repos/asf/hbase/blob/cf374af1/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
index d2b336e..041a5b9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
@@ -47,7 +47,7 @@ import com.google.common.annotations.VisibleForTesting;
  */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
-public interface WAL {
+public interface WAL extends AutoCloseable {
 
   /**
* Registers WALActionsListener

http://git-wip-us.apache.org/repos/asf/hbase/blob/cf374af1/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
index 6ece700..e09b621 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
@@ -30,6 +30,10 @@ import java.util.Comparator;
 import java.util.List;
 import java.util.Set;
 import java.util.UUID;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.commons.lang.mutable.MutableBoolean;
 import org.apache.commons.logging.Log;
@@ -55,6 +59,7 @@ import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import 

hbase git commit: HBASE-16721 Concurrency issue in WAL unflushed seqId tracking

2016-09-29 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 88512be52 -> 06c3dec2d


HBASE-16721 Concurrency issue in WAL unflushed seqId tracking

Conflicts:

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java

hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/06c3dec2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/06c3dec2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/06c3dec2

Branch: refs/heads/branch-1.1
Commit: 06c3dec2da32dcb588f0eb31e5db87796668bd39
Parents: 88512be
Author: Enis Soztutar 
Authored: Thu Sep 29 13:50:58 2016 -0700
Committer: Enis Soztutar 
Committed: Thu Sep 29 14:42:25 2016 -0700

--
 .../hadoop/hbase/regionserver/HRegion.java  |  19 ++--
 .../java/org/apache/hadoop/hbase/wal/WAL.java   |   2 +-
 .../hbase/regionserver/wal/TestFSHLog.java  | 101 ++-
 3 files changed, 110 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/06c3dec2/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index f033177..cc89b84 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -2199,6 +2199,15 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 try {
   try {
 writeEntry = mvcc.beginMemstoreInsert();
+// wait for all in-progress transactions to commit to WAL before
+// we can start the flush. This prevents
+// uncommitted transactions from being written into HFiles.
+// We have to block before we start the flush, otherwise keys that
+// were removed via a rollbackMemstore could be written to Hfiles.
+mvcc.waitForPreviousTransactionsComplete(writeEntry);
+// set w to null to prevent mvcc.advanceMemstore from being called again inside finally block
+writeEntry = null;
+
 if (wal != null) {
   Long earliestUnflushedSequenceIdForTheRegion =
   wal.startCacheFlush(encodedRegionName, flushedFamilyNames);
@@ -2275,16 +2284,6 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   throw ioe;
 }
   }
-
-  // wait for all in-progress transactions to commit to WAL before
-  // we can start the flush. This prevents
-  // uncommitted transactions from being written into HFiles.
-  // We have to block before we start the flush, otherwise keys that
-  // were removed via a rollbackMemstore could be written to Hfiles.
-  writeEntry.setWriteNumber(flushOpSeqId);
-  mvcc.waitForPreviousTransactionsComplete(writeEntry);
-  // set w to null to prevent mvcc.advanceMemstore from being called again inside finally block
-  writeEntry = null;
 } finally {
   if (writeEntry != null) {
 // in case of failure just mark current writeEntry as complete

http://git-wip-us.apache.org/repos/asf/hbase/blob/06c3dec2/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
index 473bba9..20d0834 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
@@ -50,7 +50,7 @@ import com.google.common.annotations.VisibleForTesting;
  */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
-public interface WAL {
+public interface WAL extends AutoCloseable {
 
   /**
* Registers WALActionsListener

http://git-wip-us.apache.org/repos/asf/hbase/blob/06c3dec2/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
index d14107a..1689778 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
+++ 

hbase git commit: HBASE-15984 Handle premature EOF treatment of WALs in replication.

2016-09-29 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 bfb20c0c1 -> 42dff8a58


HBASE-15984 Handle premature EOF treatment of WALs in replication.

In some particular deployments, the Replication code believes it has
reached EOF for a WAL prior to successfully parsing all bytes known to
exist in a cleanly closed file.

Consistently this failure happens due to an InvalidProtobufException
after some number of seeks during our attempts to tail the in-progress
RegionServer WAL. As a work-around, this patch treats cleanly closed
files differently than other execution paths. If an EOF is detected due
to parsing or other errors while there are still unparsed bytes before
the end-of-file trailer, we now reset the WAL to the very beginning and
attempt a clean read-through.

In current testing, a single such reset is sufficient to work around
observed dataloss. However, the above change will retry a given WAL file
indefinitely. On each such attempt, a log message like the below will
be emitted at the WARN level:

  Processing end of WAL file '{}'. At position {}, which is too far away
  from reported file length {}. Restarting WAL reading (see HBASE-15983
  for details).

Additionally, this patch adds log detail at the TRACE level about file
offsets seen while handling recoverable errors. It also adds metrics
that measure the use of this recovery mechanism.
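
A condensed sketch of the decision described above (names here are hypothetical; the real logic lives in ProtobufLogReader and the replication source classes listed in the diffstat):

    // Hypothetical condensation of the HBASE-15984 work-around.
    // cleanlyClosed: the WAL has a proper end-of-file trailer.
    // position: where parsing stopped; trailerStart: where the trailer begins.
    boolean shouldRestartFromBeginning(boolean cleanlyClosed, long position, long trailerStart) {
      // An apparent EOF while unparsed bytes remain before the trailer of a cleanly
      // closed file means bytes known to exist were skipped: reset to the start of
      // the WAL and attempt a clean read-through, emitting the WARN quoted above
      // and bumping the new restartedLogReading/repeatedLogFileBytes metrics.
      return cleanlyClosed && position < trailerStart;
    }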

 Conflicts:

hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java

hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java

hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java

 Conflicts:

hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java

hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java

hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java

hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/42dff8a5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/42dff8a5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/42dff8a5

Branch: refs/heads/branch-1.2
Commit: 42dff8a58af02ec03fe97db34ab930defb79141f
Parents: bfb20c0
Author: Sean Busbey 
Authored: Tue Jun 7 16:00:46 2016 -0500
Committer: Sean Busbey 
Committed: Thu Sep 29 16:22:37 2016 -0500

--
 .../MetricsReplicationSourceSource.java | 17 +
 .../MetricsReplicationGlobalSourceSource.java   | 44 +++
 .../MetricsReplicationSourceSourceImpl.java | 80 
 .../regionserver/wal/ProtobufLogReader.java | 45 +--
 .../replication/regionserver/MetricsSource.java | 35 +
 .../regionserver/ReplicationSource.java | 39 --
 .../ReplicationWALReaderManager.java| 10 +++
 src/main/asciidoc/_chapters/ops_mgt.adoc| 24 +-
 8 files changed, 278 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/42dff8a5/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
index fecf191..22b90dd 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
@@ -32,6 +32,16 @@ public interface MetricsReplicationSourceSource {
 
   public static final String SOURCE_LOG_EDITS_FILTERED = "source.logEditsFiltered";
 
+  public static final String SOURCE_CLOSED_LOGS_WITH_UNKNOWN_LENGTH =
+  "source.closedLogsWithUnknownFileLength";
+  public static final String SOURCE_UNCLEANLY_CLOSED_LOGS = "source.uncleanlyClosedLogs";
+  public static final String SOURCE_UNCLEANLY_CLOSED_IGNORED_IN_BYTES =
+  "source.ignoredUncleanlyClosedLogContentsInBytes";
+  public static final String SOURCE_RESTARTED_LOG_READING = "source.restartedLogReading";
+  public static final String SOURCE_REPEATED_LOG_FILE_BYTES = "source.repeatedLogFileBytes";

hbase git commit: HBASE-16732 Avoid possible NPE in MetaTableLocator

2016-09-29 Thread jerryjch
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 2733e24d3 -> bfb20c0c1


HBASE-16732 Avoid possible NPE in MetaTableLocator


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bfb20c0c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bfb20c0c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bfb20c0c

Branch: refs/heads/branch-1.2
Commit: bfb20c0c1fa40f0580d440747a16852d2deeb78e
Parents: 2733e24
Author: Jerry He 
Authored: Thu Sep 29 13:44:59 2016 -0700
Committer: Jerry He 
Committed: Thu Sep 29 14:19:03 2016 -0700

--
 .../hadoop/hbase/zookeeper/MetaTableLocator.java   | 13 -
 .../hadoop/hbase/zookeeper/ZooKeeperWatcher.java   |  8 +---
 2 files changed, 13 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bfb20c0c/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
index ac6c6f0..0b7d42a 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
@@ -554,17 +554,20 @@ public class MetaTableLocator {
   final long timeout, Configuration conf)
   throws InterruptedException {
 int numReplicasConfigured = 1;
+
+List servers = new ArrayList();
+// Make the blocking call first so that we do the wait to know
+// the znodes are all in place or timeout.
+ServerName server = blockUntilAvailable(zkw, timeout);
+if (server == null) return null;
+servers.add(server);
+
 try {
   List metaReplicaNodes = zkw.getMetaReplicaNodes();
   numReplicasConfigured = metaReplicaNodes.size();
 } catch (KeeperException e) {
   LOG.warn("Got ZK exception " + e);
 }
-List servers = new ArrayList(numReplicasConfigured);
-ServerName server = blockUntilAvailable(zkw, timeout);
-if (server == null) return null;
-servers.add(server);
-
 for (int replicaId = 1; replicaId < numReplicasConfigured; replicaId++) {
   // return all replica locations for the meta
   servers.add(getMetaRegionLocation(zkw, replicaId));

http://git-wip-us.apache.org/repos/asf/hbase/blob/bfb20c0c/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
index 5b6385f..73a3a9e 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
@@ -485,9 +485,11 @@ public class ZooKeeperWatcher implements Watcher, 
Abortable, Closeable {
   public List getMetaReplicaNodes() throws KeeperException {
 List childrenOfBaseNode = ZKUtil.listChildrenNoWatch(this, baseZNode);
 List metaReplicaNodes = new ArrayList(2);
-String pattern = conf.get("zookeeper.znode.metaserver","meta-region-server");
-for (String child : childrenOfBaseNode) {
-  if (child.startsWith(pattern)) metaReplicaNodes.add(child);
+if (childrenOfBaseNode != null) {
+  String pattern = conf.get("zookeeper.znode.metaserver","meta-region-server");
+  for (String child : childrenOfBaseNode) {
+if (child.startsWith(pattern)) metaReplicaNodes.add(child);
+  }
 }
 return metaReplicaNodes;
   }
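
Taken together, the two hunks reorder the blocking wait ahead of the replica count and null-guard the znode listing. A minimal sketch of the fixed flow (blockUntilAvailable, getMetaReplicaNodes and getMetaRegionLocation are the real methods; the standalone wrapper and its signature are illustrative):

    // Illustrative wrapper around the real MetaTableLocator/ZooKeeperWatcher calls.
    List<ServerName> locateMetaReplicas(ZooKeeperWatcher zkw, long timeout)
        throws InterruptedException, KeeperException {
      List<ServerName> servers = new ArrayList<ServerName>();
      // 1. Block first, so the replica znodes are in place (or we time out)
      //    before we try to count them.
      ServerName primary = blockUntilAvailable(zkw, timeout);
      if (primary == null) return null;
      servers.add(primary);
      // 2. getMetaReplicaNodes() now tolerates a missing base znode: with the
      //    null check above it returns an empty list instead of throwing NPE.
      int replicas = Math.max(1, zkw.getMetaReplicaNodes().size());
      for (int replicaId = 1; replicaId < replicas; replicaId++) {
        servers.add(getMetaRegionLocation(zkw, replicaId));
      }
      return servers;
    }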



hbase git commit: HBASE-16732 Avoid possible NPE in MetaTableLocator

2016-09-29 Thread jerryjch
Repository: hbase
Updated Branches:
  refs/heads/branch-1 df25ebf84 -> 5ac2776d2


HBASE-16732 Avoid possible NPE in MetaTableLocator


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5ac2776d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5ac2776d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5ac2776d

Branch: refs/heads/branch-1
Commit: 5ac2776d2394c339f4bfee99de1150387e0d92e4
Parents: df25ebf
Author: Jerry He 
Authored: Thu Sep 29 13:44:59 2016 -0700
Committer: Jerry He 
Committed: Thu Sep 29 14:14:01 2016 -0700

--
 .../hadoop/hbase/zookeeper/MetaTableLocator.java   | 13 -
 .../hadoop/hbase/zookeeper/ZooKeeperWatcher.java   |  8 +---
 2 files changed, 13 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5ac2776d/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
index 40b84cf..1630d83 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
@@ -554,17 +554,20 @@ public class MetaTableLocator {
   final long timeout, Configuration conf)
   throws InterruptedException {
 int numReplicasConfigured = 1;
+
+List servers = new ArrayList();
+// Make the blocking call first so that we do the wait to know
+// the znodes are all in place or timeout.
+ServerName server = blockUntilAvailable(zkw, timeout);
+if (server == null) return null;
+servers.add(server);
+
 try {
   List metaReplicaNodes = zkw.getMetaReplicaNodes();
   numReplicasConfigured = metaReplicaNodes.size();
 } catch (KeeperException e) {
   LOG.warn("Got ZK exception " + e);
 }
-List servers = new ArrayList(numReplicasConfigured);
-ServerName server = blockUntilAvailable(zkw, timeout);
-if (server == null) return null;
-servers.add(server);
-
 for (int replicaId = 1; replicaId < numReplicasConfigured; replicaId++) {
   // return all replica locations for the meta
   servers.add(getMetaRegionLocation(zkw, replicaId));

http://git-wip-us.apache.org/repos/asf/hbase/blob/5ac2776d/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
index ce209d6..f333cd5 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
@@ -496,9 +496,11 @@ public class ZooKeeperWatcher implements Watcher, 
Abortable, Closeable {
   public List getMetaReplicaNodes() throws KeeperException {
 List childrenOfBaseNode = ZKUtil.listChildrenNoWatch(this, baseZNode);
 List metaReplicaNodes = new ArrayList(2);
-String pattern = conf.get("zookeeper.znode.metaserver","meta-region-server");
-for (String child : childrenOfBaseNode) {
-  if (child.startsWith(pattern)) metaReplicaNodes.add(child);
+if (childrenOfBaseNode != null) {
+  String pattern = conf.get("zookeeper.znode.metaserver","meta-region-server");
+  for (String child : childrenOfBaseNode) {
+if (child.startsWith(pattern)) metaReplicaNodes.add(child);
+  }
 }
 return metaReplicaNodes;
   }



hbase git commit: HBASE-16732 Avoid possible NPE in MetaTableLocator

2016-09-29 Thread jerryjch
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 39a79d50f -> 728f58ad5


HBASE-16732 Avoid possible NPE in MetaTableLocator


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/728f58ad
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/728f58ad
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/728f58ad

Branch: refs/heads/branch-1.3
Commit: 728f58ad5f1e52264df58161fcbcea4ce8527a9d
Parents: 39a79d5
Author: Jerry He 
Authored: Thu Sep 29 13:44:59 2016 -0700
Committer: Jerry He 
Committed: Thu Sep 29 14:05:24 2016 -0700

--
 .../hadoop/hbase/zookeeper/MetaTableLocator.java   | 13 -
 .../hadoop/hbase/zookeeper/ZooKeeperWatcher.java   |  8 +---
 2 files changed, 13 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/728f58ad/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
index be5bf6e..c13e212 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
@@ -553,17 +553,20 @@ public class MetaTableLocator {
   final long timeout, Configuration conf)
   throws InterruptedException {
 int numReplicasConfigured = 1;
+
+List servers = new ArrayList();
+// Make the blocking call first so that we do the wait to know
+// the znodes are all in place or timeout.
+ServerName server = blockUntilAvailable(zkw, timeout);
+if (server == null) return null;
+servers.add(server);
+
 try {
   List metaReplicaNodes = zkw.getMetaReplicaNodes();
   numReplicasConfigured = metaReplicaNodes.size();
 } catch (KeeperException e) {
   LOG.warn("Got ZK exception " + e);
 }
-List servers = new ArrayList(numReplicasConfigured);
-ServerName server = blockUntilAvailable(zkw, timeout);
-if (server == null) return null;
-servers.add(server);
-
 for (int replicaId = 1; replicaId < numReplicasConfigured; replicaId++) {
   // return all replica locations for the meta
   servers.add(getMetaRegionLocation(zkw, replicaId));

http://git-wip-us.apache.org/repos/asf/hbase/blob/728f58ad/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
index d89041d..f5fa0b7 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
@@ -481,9 +481,11 @@ public class ZooKeeperWatcher implements Watcher, 
Abortable, Closeable {
   public List getMetaReplicaNodes() throws KeeperException {
 List childrenOfBaseNode = ZKUtil.listChildrenNoWatch(this, baseZNode);
 List metaReplicaNodes = new ArrayList(2);
-String pattern = conf.get("zookeeper.znode.metaserver","meta-region-server");
-for (String child : childrenOfBaseNode) {
-  if (child.startsWith(pattern)) metaReplicaNodes.add(child);
+if (childrenOfBaseNode != null) {
+  String pattern = conf.get("zookeeper.znode.metaserver","meta-region-server");
+  for (String child : childrenOfBaseNode) {
+if (child.startsWith(pattern)) metaReplicaNodes.add(child);
+  }
 }
 return metaReplicaNodes;
   }



hbase git commit: HBASE-16732 Avoid possible NPE in MetaTableLocator

2016-09-29 Thread jerryjch
Repository: hbase
Updated Branches:
  refs/heads/master bf3c928b7 -> 3757da643


HBASE-16732 Avoid possible NPE in MetaTableLocator


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3757da64
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3757da64
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3757da64

Branch: refs/heads/master
Commit: 3757da643d43bf0eaf8a0bd4c30b56f24c95fb6c
Parents: bf3c928
Author: Jerry He 
Authored: Thu Sep 29 13:44:59 2016 -0700
Committer: Jerry He 
Committed: Thu Sep 29 14:00:46 2016 -0700

--
 .../hadoop/hbase/zookeeper/MetaTableLocator.java   | 13 -
 .../hadoop/hbase/zookeeper/ZooKeeperWatcher.java   |  8 +---
 2 files changed, 13 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3757da64/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
index 359617a..7b64e0c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
@@ -550,17 +550,20 @@ public class MetaTableLocator {
   final long timeout, Configuration conf)
   throws InterruptedException {
 int numReplicasConfigured = 1;
+
+List servers = new ArrayList();
+// Make the blocking call first so that we do the wait to know
+// the znodes are all in place or timeout.
+ServerName server = blockUntilAvailable(zkw, timeout);
+if (server == null) return null;
+servers.add(server);
+
 try {
   List metaReplicaNodes = zkw.getMetaReplicaNodes();
   numReplicasConfigured = metaReplicaNodes.size();
 } catch (KeeperException e) {
   LOG.warn("Got ZK exception " + e);
 }
-List servers = new ArrayList(numReplicasConfigured);
-ServerName server = blockUntilAvailable(zkw, timeout);
-if (server == null) return null;
-servers.add(server);
-
 for (int replicaId = 1; replicaId < numReplicasConfigured; replicaId++) {
   // return all replica locations for the meta
   servers.add(getMetaRegionLocation(zkw, replicaId));

http://git-wip-us.apache.org/repos/asf/hbase/blob/3757da64/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
index f7d7e26..1f3afe4 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
@@ -481,9 +481,11 @@ public class ZooKeeperWatcher implements Watcher, 
Abortable, Closeable {
   public List getMetaReplicaNodes() throws KeeperException {
 List childrenOfBaseNode = ZKUtil.listChildrenNoWatch(this, baseZNode);
 List metaReplicaNodes = new ArrayList(2);
-String pattern = conf.get("zookeeper.znode.metaserver","meta-region-server");
-for (String child : childrenOfBaseNode) {
-  if (child.startsWith(pattern)) metaReplicaNodes.add(child);
+if (childrenOfBaseNode != null) {
+  String pattern = conf.get("zookeeper.znode.metaserver","meta-region-server");
+  for (String child : childrenOfBaseNode) {
+if (child.startsWith(pattern)) metaReplicaNodes.add(child);
+  }
 }
 return metaReplicaNodes;
   }



hbase git commit: HBASE-16721 Concurrency issue in WAL unflushed seqId tracking

2016-09-29 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/master 76396714e -> bf3c928b7


HBASE-16721 Concurrency issue in WAL unflushed seqId tracking


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bf3c928b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bf3c928b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bf3c928b

Branch: refs/heads/master
Commit: bf3c928b7499797735f71974992b68c9d876b97c
Parents: 7639671
Author: Enis Soztutar 
Authored: Thu Sep 29 12:56:22 2016 -0700
Committer: Enis Soztutar 
Committed: Thu Sep 29 12:56:22 2016 -0700

--
 .../java/org/apache/hadoop/hbase/wal/WAL.java   |   2 +-
 .../regionserver/wal/AbstractTestFSWAL.java |   2 +-
 .../hbase/regionserver/wal/TestFSHLog.java  | 105 +++
 3 files changed, 107 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bf3c928b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
index 79321b3..20ae602 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
@@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
-public interface WAL {
+public interface WAL extends AutoCloseable {
 
   /**
* Registers WALActionsListener

http://git-wip-us.apache.org/repos/asf/hbase/blob/bf3c928b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
index 9eaeda4..19759d1 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
@@ -72,7 +72,7 @@ import org.junit.rules.TestName;
 
 public abstract class AbstractTestFSWAL {
 
-  private static final Log LOG = LogFactory.getLog(AbstractTestFSWAL.class);
+  protected static final Log LOG = LogFactory.getLog(AbstractTestFSWAL.class);
 
   protected static Configuration CONF;
   protected static FileSystem FS;

http://git-wip-us.apache.org/repos/asf/hbase/blob/bf3c928b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
index bf56afe..640e851 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
@@ -23,6 +23,10 @@ import java.lang.reflect.Field;
 import java.util.List;
 import java.util.NavigableMap;
 import java.util.TreeMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -32,14 +36,21 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.hbase.wal.WALKey;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import static org.junit.Assert.assertEquals;
+
 /**
  * Provides FSHLog test cases.
  */
@@ -101,4 +112,98 @@ public class TestFSHLog extends AbstractTestFSWAL {
   log.close();
 }
   }
+
+  /**
+   * Test case for https://issues.apache.org/jira/browse/HBASE-16721
+   */
+  @Test (timeout = 3)
+  public void testUnflushedSeqIdTracking() throws IOException, 

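The new test body is cut off above, but the added imports (CountDownLatch, ExecutorService, AtomicBoolean) give its shape: a latch-gated flush racing concurrent appends. A generic sketch of that pattern (everything below is illustrative, not the real TestFSHLog code):

    final CountDownLatch flushMayProceed = new CountDownLatch(1);
    final AtomicBoolean failed = new AtomicBoolean(false);
    ExecutorService pool = Executors.newSingleThreadExecutor();
    pool.submit(new Runnable() {
      @Override
      public void run() {
        try {
          flushMayProceed.await();  // hold the flush until appends are in flight
          flushRegion();            // hypothetical: triggers the memstore flush
        } catch (Exception e) {
          failed.set(true);
        }
      }
    });
    startConcurrentAppends();       // hypothetical: issue WAL appends meanwhile
    flushMayProceed.countDown();    // release the flush mid-append
    pool.shutdown();
    org.junit.Assert.assertFalse("flush raced appends incorrectly", failed.get());
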
[07/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColCell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColCell.html 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColCell.html
index 695f6db..5965e94 100644
--- a/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColCell.html
[generated javadoc hunks omitted: the +/- pairs for CellUtil.FirstOnRowColCell (fields fArray, foffset, flength, qArray, qoffset, qlength, the constructor, and the family/qualifier accessors) differ only in HTML anchor markup that was lost in archiving]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSByteBufferedCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSByteBufferedCell.html
 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSByteBufferedCell.html
index 29dd9e4..cad2f03 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSByteBufferedCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSByteBufferedCell.html
[generated javadoc hunks omitted: anchor-only changes around CellUtil.FirstOnRowColTSByteBufferedCell (field ts, the constructor, and getTimestamp); HTML markup lost in archiving]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSCell.html 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSCell.html
index 6027eed..50a8da6 100644
--- 

[06/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/CellUtil.TagRewriteCell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/CellUtil.TagRewriteCell.html 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.TagRewriteCell.html
index 93c76dc..94ef7ee 100644
--- a/devapidocs/org/apache/hadoop/hbase/CellUtil.TagRewriteCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.TagRewriteCell.html
[generated javadoc hunks condensed; HTML markup lost in archiving. Recoverable changes for CellUtil.TagRewriteCell: the declaration changes from "implements Cell, SettableSequenceId, SettableTimestamp, HeapSize" to "implements ExtendedCell", which adds Cloneable and ExtendedCell to the implemented-interface set; the method table grows from 22 to 24 entries with the additions int getSerializedSize(boolean withTags) and int write(OutputStream out, boolean withTags) ("Write this cell to an OutputStream in a KeyValue format"); the remaining +/- pairs differ only in stripped anchors]

[12/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/allclasses-frame.html
--
diff --git a/devapidocs/allclasses-frame.html b/devapidocs/allclasses-frame.html
index 5b7277c..b673f22 100644
--- a/devapidocs/allclasses-frame.html
+++ b/devapidocs/allclasses-frame.html
@@ -81,15 +81,9 @@
 AsyncFSWAL.Payload
 AsyncFSWALProvider
 AsyncFSWALProvider.AsyncWriter
-AsyncHBaseSaslRpcClient
-AsyncHBaseSaslRpcClientHandler
 AsyncProcess
-AsyncProcess.AsyncRequestFuture
-AsyncProcess.BatchErrors
 AsyncProcess.ListRowAccess
-AsyncProcess.ReplicaResultState
 AsyncProcess.RequestSizeChecker
-AsyncProcess.Retry
 AsyncProcess.RowChecker
 AsyncProcess.RowChecker.ReturnCode
 AsyncProcess.RowCheckerHost
@@ -97,6 +91,10 @@
 AsyncProcess.TaskCountChecker
 AsyncProtobufLogWriter
 AsyncProtobufLogWriter.BlockingCompletionHandler
+AsyncRequestFuture
+AsyncRequestFutureImpl
+AsyncRequestFutureImpl.ReplicaResultState
+AsyncRequestFutureImpl.Retry
 AtomicUtils
 Attributes
 AuthenticationKey
@@ -161,6 +159,7 @@
 Batch
 Batch.Call
 Batch.Callback
+BatchErrors
 BigDecimalColumnInterpreter
 BinaryComparator
 BinaryPrefixComparator
@@ -666,6 +665,7 @@
 ExpressionExpander
 ExpressionNode
 ExpressionParser
+ExtendedCell
 FailedLogCloseException
 FailedSanityCheckException
 FailedServerException
@@ -1089,6 +1089,8 @@
 KeepDeletedCells
 KeyLocker
 KeyOnlyFilter
+KeyOnlyFilter.KeyOnlyByteBufferedCell
+KeyOnlyFilter.KeyOnlyCell
 KeyPrefixRegionSplitPolicy
 KeyProvider
 KeyRange
@@ -1238,7 +1240,6 @@
 MemStoreFlusher.WakeupFlushThread
 MemStoreLAB
 MemStoreScanner
-MemStoreScanner.Type
 MemStoreSnapshot
 MemStoreWrapper
 Merge
@@ -1446,6 +1447,9 @@
 NamespacesResource
 NamespaceStateManager
 NamespaceTableAndRegionInfo
+NamespaceTableCfWALEntryFilter
+NettyHBaseSaslRpcClient
+NettyHBaseSaslRpcClientHandler
 NettyRpcClient
 NettyRpcClientConfigHelper
 NettyRpcConnection
@@ -1843,7 +1847,6 @@
 ReplicationTracker
 ReplicationTrackerZKImpl
 ReplicationWALReaderManager
-ReplicationZKLockCleanerChore
 ResizableBlockCache
 ResourceBase
 ResourceConfig
@@ -2001,6 +2004,7 @@
 ScannerModel.FilterModel.ByteArrayComparableModel
 ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType
 ScannerModel.FilterModel.FilterType
+ScannerResetException
 ScannerResource
 ScannerResultGenerator
 ScannerTimeoutException
@@ -2017,7 +2021,6 @@
 SecureBulkLoadEndpoint
 SecureBulkLoadManager
 SecureBulkLoadManager.SecureBulkLoadListener
-SecureBulkLoadUtil
 SecureProtobufLogReader
 SecureProtobufLogWriter
 SecureWALCellCodec
@@ -2159,6 +2162,9 @@
 SplitTransactionImpl
 SplitTransactionImpl.JournalEntryImpl
 SplitTransactionImpl.LoggingProgressable
+SslRMIClientSocketFactorySecure
+SslRMIServerSocketFactorySecure
+SslSelectChannelConnectorSecure
 SslSocketConnectorSecure
 StabilityOptions
 StateDumpServlet
@@ -2219,7 +2225,6 @@
 StoreScanner
 StoreScanner.StoreScannerCompactionRace
 StoreUtils
-Streamable
 StreamUtils
 Strings
 StringUtils
@@ -2275,7 +2280,6 @@
 TableBasedReplicationQueuesClientImpl
 TableBasedReplicationQueuesImpl
 TableCFsUpdater
-TableCfWALEntryFilter
 TableDescriptors
 TableExistsException
 TableHFileArchiveTracker
@@ -2543,7 +2547,6 @@
 ZKSplitLogManagerCoordination
 ZKSplitLogManagerCoordination.TaskFinisher
 ZKSplitLogManagerCoordination.TaskFinisher.Status
-ZKSplitLogManagerCoordination.ZkSplitLogManagerDetails
 ZkSplitLogWorkerCoordination
 ZkSplitLogWorkerCoordination.ZkSplitTaskDetails
 ZKTableArchiveClient

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/allclasses-noframe.html
--
diff --git a/devapidocs/allclasses-noframe.html 
b/devapidocs/allclasses-noframe.html
index 40b0029..af5296d 100644
--- a/devapidocs/allclasses-noframe.html
+++ b/devapidocs/allclasses-noframe.html
@@ -81,15 +81,9 @@
 AsyncFSWAL.Payload
 AsyncFSWALProvider
 AsyncFSWALProvider.AsyncWriter
-AsyncHBaseSaslRpcClient
-AsyncHBaseSaslRpcClientHandler
 AsyncProcess
-AsyncProcess.AsyncRequestFuture
-AsyncProcess.BatchErrors
 AsyncProcess.ListRowAccess
-AsyncProcess.ReplicaResultState
 AsyncProcess.RequestSizeChecker
-AsyncProcess.Retry
 AsyncProcess.RowChecker
 AsyncProcess.RowChecker.ReturnCode
 AsyncProcess.RowCheckerHost
@@ -97,6 +91,10 @@
 AsyncProcess.TaskCountChecker
 AsyncProtobufLogWriter
 AsyncProtobufLogWriter.BlockingCompletionHandler
+AsyncRequestFuture
+AsyncRequestFutureImpl
+AsyncRequestFutureImpl.ReplicaResultState
+AsyncRequestFutureImpl.Retry
 AtomicUtils
 Attributes
 AuthenticationKey
@@ -161,6 +159,7 @@
 Batch
 Batch.Call
 Batch.Callback
+BatchErrors
 BigDecimalColumnInterpreter
 BinaryComparator
 BinaryPrefixComparator
@@ -666,6 +665,7 @@
 ExpressionExpander
 ExpressionNode
 ExpressionParser
+ExtendedCell
 FailedLogCloseException
 FailedSanityCheckException
 FailedServerException
@@ -1089,6 +1089,8 @@
 KeepDeletedCells
 KeyLocker
 KeyOnlyFilter

[17/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
index 4dbb152..3d30c3b 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
@@ -32,26 +32,26 @@
 024import java.io.DataInput;
 025import java.io.DataOutput;
 026import java.io.IOException;
-027import java.math.BigDecimal;
-028import java.math.BigInteger;
-029import java.nio.ByteBuffer;
-030import java.nio.charset.Charset;
-031import java.security.SecureRandom;
-032import java.util.Arrays;
-033import java.util.Collection;
-034import java.util.Comparator;
-035import java.util.Iterator;
-036import java.util.List;
-037
-038import com.google.protobuf.ByteString;
+027import 
java.io.UnsupportedEncodingException;
+028import java.math.BigDecimal;
+029import java.math.BigInteger;
+030import java.nio.ByteBuffer;
+031import java.nio.charset.Charset;
+032import 
java.nio.charset.StandardCharsets;
+033import java.security.SecureRandom;
+034import java.util.Arrays;
+035import java.util.Collection;
+036import java.util.Comparator;
+037import java.util.Iterator;
+038import java.util.List;
 039
 040import org.apache.commons.logging.Log;
 041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-043import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-044import org.apache.hadoop.hbase.Cell;
-045import 
org.apache.hadoop.hbase.CellComparator;
-046import 
org.apache.hadoop.hbase.KeyValue;
+042import org.apache.hadoop.hbase.Cell;
+043import 
org.apache.hadoop.hbase.CellComparator;
+044import 
org.apache.hadoop.hbase.KeyValue;
+045import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+046import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 047import 
org.apache.hadoop.io.RawComparator;
 048import 
org.apache.hadoop.io.WritableComparator;
 049import 
org.apache.hadoop.io.WritableUtils;
@@ -60,2598 +60,2618 @@
 052
 053import 
com.google.common.annotations.VisibleForTesting;
 054import com.google.common.collect.Lists;
-055
-056/**
-057 * Utility class that handles byte 
arrays, conversions to/from other types,
-058 * comparisons, hash code generation, 
manufacturing keys for HashMaps or
-059 * HashSets, and can be used as key in 
maps or trees.
-060 */
-061@SuppressWarnings("restriction")
-062@InterfaceAudience.Public
-063@InterfaceStability.Stable
-064@edu.umd.cs.findbugs.annotations.SuppressWarnings(
-065
value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
-066justification="It has been like this 
forever")
-067public class Bytes implements Comparable<Bytes> {
-068  //HConstants.UTF8_ENCODING should be 
updated if this changed
-069  /** When we encode strings, we always 
specify UTF8 encoding */
-070  private static final String 
UTF8_ENCODING = "UTF-8";
-071
-072  //HConstants.UTF8_CHARSET should be 
updated if this changed
-073  /** When we encode strings, we always 
specify UTF8 encoding */
-074  private static final Charset 
UTF8_CHARSET = Charset.forName(UTF8_ENCODING);
-075
-076  //HConstants.EMPTY_BYTE_ARRAY should be 
updated if this changed
-077  private static final byte [] 
EMPTY_BYTE_ARRAY = new byte [0];
-078
-079  private static final Log LOG = 
LogFactory.getLog(Bytes.class);
+055import com.google.protobuf.ByteString;
+056
+057/**
+058 * Utility class that handles byte 
arrays, conversions to/from other types,
+059 * comparisons, hash code generation, 
manufacturing keys for HashMaps or
+060 * HashSets, and can be used as key in 
maps or trees.
+061 */
+062@SuppressWarnings("restriction")
+063@InterfaceAudience.Public
+064@InterfaceStability.Stable
+065@edu.umd.cs.findbugs.annotations.SuppressWarnings(
+066
value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
+067justification="It has been like this 
forever")
+068public class Bytes implements Comparable<Bytes> {
+069  //HConstants.UTF8_ENCODING should be 
updated if this changed
+070  /** When we encode strings, we always 
specify UTF8 encoding */
+071  private static final String 
UTF8_ENCODING = "UTF-8";
+072
+073  //HConstants.UTF8_CHARSET should be 
updated if this changed
+074  /** When we encode strings, we always 
specify UTF8 encoding */
+075  private static final Charset 
UTF8_CHARSET = Charset.forName(UTF8_ENCODING);
+076
+077  // Using the charset canonical name for 
String/byte[] conversions is much
+078  // more efficient due to use of cached 
encoders/decoders.
+079  private static final String UTF8_CSN = 
StandardCharsets.UTF_8.name();
 080
-081  /**
-082   * Size of boolean in bytes
-083   */
-084  public static final int SIZEOF_BOOLEAN 
= Byte.SIZE / Byte.SIZE;
+081  //HConstants.EMPTY_BYTE_ARRAY should be 
updated if this 

[27/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html
index d607296..e22025b 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.html
@@ -57,616 +57,615 @@
 049import 
org.apache.hadoop.hbase.TableName;
 050import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 051import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-052import 
org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFuture;
-053import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-054import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-055
-056/**
-057 * HTableMultiplexer provides a 
thread-safe non blocking PUT API across all the tables.
-058 * Each put will be sharded into 
different buffer queues based on its destination region server.
-059 * So each region server buffer queue 
will only have the puts which share the same destination.
-060 * And each queue will have a flush 
worker thread to flush the puts request to the region server.
-061 * If any queue is full, the 
HTableMultiplexer starts to drop the Put requests for that
-062 * particular queue.
-063 *
-064 * Also all the puts will be retried a configurable number of times before dropping.
-065 * And the HTableMultiplexer can report 
the number of buffered requests and the number of the
-066 * failed (dropped) requests in total or 
on per region server basis.
-067 *
-068 * This class is thread safe.
-069 */
-070@InterfaceAudience.Public
-071@InterfaceStability.Evolving
-072public class HTableMultiplexer {
-073  private static final Log LOG = 
LogFactory.getLog(HTableMultiplexer.class.getName());
-074
-075  public static final String 
TABLE_MULTIPLEXER_FLUSH_PERIOD_MS =
-076  
"hbase.tablemultiplexer.flush.period.ms";
-077  public static final String 
TABLE_MULTIPLEXER_INIT_THREADS = "hbase.tablemultiplexer.init.threads";
-078  public static final String 
TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE =
-079  
"hbase.client.max.retries.in.queue";
-080
-081  /** The map between each region server 
to its flush worker */
-082  private final Map<HRegionLocation, FlushWorker> serverToFlushWorkerMap =
-083      new ConcurrentHashMap<>();
-084
-085  private final Configuration 
workerConf;
-086  private final ClusterConnection conn;
-087  private final ExecutorService pool;
-088  private final int maxAttempts;
-089  private final int 
perRegionServerBufferQueueSize;
-090  private final int maxKeyValueSize;
-091  private final ScheduledExecutorService 
executor;
-092  private final long flushPeriod;
-093
-094  /**
-095   * @param conf The HBaseConfiguration
-096   * @param 
perRegionServerBufferQueueSize determines the max number of the buffered Put 
ops for
-097   *  each region server before 
dropping the request.
-098   */
-099  public HTableMultiplexer(Configuration 
conf, int perRegionServerBufferQueueSize)
-100  throws IOException {
-101
this(ConnectionFactory.createConnection(conf), conf, 
perRegionServerBufferQueueSize);
-102  }
-103
-104  /**
-105   * @param conn The HBase connection.
-106   * @param conf The HBase 
configuration
-107   * @param 
perRegionServerBufferQueueSize determines the max number of the buffered Put 
ops for
-108   *  each region server before 
dropping the request.
-109   */
-110  public HTableMultiplexer(Connection 
conn, Configuration conf,
-111  int perRegionServerBufferQueueSize) 
{
-112this.conn = (ClusterConnection) 
conn;
-113this.pool = 
HTable.getDefaultExecutor(conf);
-114// how many times we could try in 
total, one more than retry number
-115this.maxAttempts = 
conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
-116
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER) + 1;
-117this.perRegionServerBufferQueueSize = 
perRegionServerBufferQueueSize;
-118this.maxKeyValueSize = 
HTable.getMaxKeyValueSize(conf);
-119this.flushPeriod = 
conf.getLong(TABLE_MULTIPLEXER_FLUSH_PERIOD_MS, 100);
-120int initThreads = 
conf.getInt(TABLE_MULTIPLEXER_INIT_THREADS, 10);
-121this.executor =
-122
Executors.newScheduledThreadPool(initThreads,
-123  new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("HTableFlushWorker-%d").build());
-124
-125this.workerConf = 
HBaseConfiguration.create(conf);
-126// We do not do the retry because we 
need to reassign puts to different queues if regions are
-127// moved.
-128
this.workerConf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 0);
-129  }
-130
-131  /**
-132   * Closes the internal {@link 
Connection}. Does nothing if the {@link Connection} has already
-133   * been closed.

[51/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/044b3379
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/044b3379
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/044b3379

Branch: refs/heads/asf-site
Commit: 044b3379513dec5a6714ace497811c6c4b580536
Parents: e3ab1d1
Author: jenkins 
Authored: Thu Sep 29 15:19:10 2016 +
Committer: Dima Spivak 
Committed: Thu Sep 29 16:07:56 2016 -0400

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 32855 -
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/allclasses-frame.html   | 1 +
 apidocs/allclasses-noframe.html | 1 +
 apidocs/constant-values.html|   571 +-
 apidocs/deprecated-list.html| 7 +-
 apidocs/index-all.html  |   104 +-
 apidocs/org/apache/hadoop/hbase/CellUtil.html   |   418 +-
 .../hadoop/hbase/DoNotRetryIOException.html | 2 +-
 apidocs/org/apache/hadoop/hbase/HConstants.html |   704 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 .../hadoop/hbase/UnknownScannerException.html   |16 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   264 +-
 .../hbase/class-use/DoNotRetryIOException.html  | 7 +
 .../hbase/class-use/HBaseIOException.html   | 7 +
 .../hadoop/hbase/class-use/ServerName.html  | 4 +-
 .../hadoop/hbase/class-use/TableName.html   |19 +-
 .../hbase/class-use/TableNotFoundException.html |19 +-
 .../hadoop/hbase/client/CompactionState.html| 4 +-
 .../apache/hadoop/hbase/client/Consistency.html | 4 +-
 .../apache/hadoop/hbase/client/Durability.html  | 4 +-
 ...ableMultiplexer.HTableMultiplexerStatus.html |20 +-
 .../hadoop/hbase/client/HTableMultiplexer.html  |28 +-
 .../hadoop/hbase/client/IsolationLevel.html | 4 +-
 .../org/apache/hadoop/hbase/client/Result.html  |68 +-
 .../hadoop/hbase/client/SnapshotType.html   | 4 +-
 .../hadoop/hbase/client/class-use/Admin.html|19 +-
 .../hbase/client/class-use/Connection.html  |20 +-
 .../hbase/client/class-use/Consistency.html |14 +-
 .../hbase/client/class-use/Durability.html  | 8 +-
 .../hbase/client/class-use/IsolationLevel.html  |14 +-
 .../hadoop/hbase/client/class-use/Mutation.html | 8 +-
 .../hbase/client/class-use/RegionLocator.html   |19 +-
 .../hadoop/hbase/client/class-use/Result.html   |26 +-
 .../hadoop/hbase/client/class-use/Row.html  | 6 +-
 .../hadoop/hbase/client/class-use/Scan.html | 6 +-
 .../hadoop/hbase/client/class-use/Table.html|61 +-
 .../hadoop/hbase/client/package-tree.html   | 8 +-
 .../client/replication/ReplicationAdmin.html|40 +-
 .../exceptions/RegionInRecoveryException.html   | 4 +-
 .../hbase/exceptions/ScannerResetException.html |   318 +
 .../exceptions/UnknownProtocolException.html| 4 +-
 .../class-use/ScannerResetException.html|   125 +
 .../hadoop/hbase/exceptions/package-frame.html  | 1 +
 .../hbase/exceptions/package-summary.html   | 7 +
 .../hadoop/hbase/exceptions/package-tree.html   | 1 +
 .../hbase/filter/CompareFilter.CompareOp.html   | 4 +-
 .../hadoop/hbase/filter/KeyOnlyFilter.html  |18 +-
 .../hbase/filter/MultiRowRangeFilter.html   |14 +-
 .../filter/class-use/ByteArrayComparable.html   | 8 +-
 .../class-use/CompareFilter.CompareOp.html  | 8 +-
 .../filter/class-use/Filter.ReturnCode.html |64 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |62 +-
 .../hadoop/hbase/filter/package-tree.html   | 4 +-
 .../io/class-use/ImmutableBytesWritable.html|26 +-
 .../hadoop/hbase/io/class-use/TimeRange.html|12 +-
 .../hbase/io/crypto/class-use/Cipher.html   |16 +-
 .../hbase/io/encoding/DataBlockEncoding.html| 4 +-
 .../hadoop/hbase/ipc/package-summary.html   | 4 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 4 +-
 .../hadoop/hbase/jetty/package-frame.html   |14 +
 .../hadoop/hbase/jetty/package-summary.html |   124 +
 .../apache/hadoop/hbase/jetty/package-tree.html |   128 +
 .../apache/hadoop/hbase/jetty/package-use.html  |   125 +
 .../hadoop/hbase/mapred/package-summary.html| 4 +-
 .../hadoop/hbase/mapred/package-tree.html   | 4 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.html  |   247 +-
 .../apache/hadoop/hbase/quotas/QuotaType.html   | 4 +-
 .../hadoop/hbase/quotas/package-tree.html   | 2 +-
 .../hadoop/hbase/regionserver/BloomType.html| 4 +-
 .../replication/ReplicationPeerConfig.html  |62 +-
 

[21/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
index b0dae74..d009a5c 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
@@ -30,87 +30,100 @@
 022import java.util.HashMap;
 023import java.util.List;
 024import java.util.Map;
-025import java.util.TreeMap;
-026
-027import 
org.apache.hadoop.hbase.TableName;
-028import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-029import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-030import 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
-031import 
org.apache.hadoop.hbase.util.Bytes;
-032
-033/**
-034 * A configuration for the replication 
peer cluster.
-035 */
-036@InterfaceAudience.Public
-037@InterfaceStability.Evolving
-038public class ReplicationPeerConfig {
-039
-040  private String clusterKey;
-041  private String 
replicationEndpointImpl;
-042  private final Mapbyte[], byte[] 
peerData;
-043  private final MapString, String 
configuration;
-044  private MapTableName, ? extends 
CollectionString tableCFsMap = null;
-045
-046
-047  public ReplicationPeerConfig() {
-048this.peerData = new 
TreeMapbyte[], byte[](Bytes.BYTES_COMPARATOR);
-049this.configuration = new 
HashMapString, String(0);
-050  }
-051
-052  /**
-053   * Set the clusterKey which is the 
concatenation of the slave cluster's:
-054   *  
hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
-055   */
-056  public ReplicationPeerConfig 
setClusterKey(String clusterKey) {
-057this.clusterKey = clusterKey;
-058return this;
-059  }
-060
-061  /**
-062   * Sets the ReplicationEndpoint plugin 
class for this peer.
-063   * @param replicationEndpointImpl a 
class implementing ReplicationEndpoint
-064   */
-065  public ReplicationPeerConfig 
setReplicationEndpointImpl(String replicationEndpointImpl) {
-066this.replicationEndpointImpl = 
replicationEndpointImpl;
-067return this;
-068  }
-069
-070  public String getClusterKey() {
-071return clusterKey;
-072  }
-073
-074  public String 
getReplicationEndpointImpl() {
-075return replicationEndpointImpl;
-076  }
-077
-078  public Map<byte[], byte[]> getPeerData() {
-079    return peerData;
-080  }
-081
-082  public Map<String, String> getConfiguration() {
-083    return configuration;
-084  }
-085
-086  public Map<TableName, List<String>> getTableCFsMap() {
-087    return (Map<TableName, List<String>>) tableCFsMap;
-088  }
-089
-090  public ReplicationPeerConfig setTableCFsMap(Map<TableName,
-091      ? extends Collection<String>> tableCFsMap) {
-092    this.tableCFsMap = tableCFsMap;
-093    return this;
-094  }
-095
-096  @Override
-097  public String toString() {
-098StringBuilder builder = new 
StringBuilder("clusterKey=").append(clusterKey).append(",");
-099
builder.append("replicationEndpointImpl=").append(replicationEndpointImpl).append(",");
-100if (tableCFsMap != null) {
-101  
builder.append("tableCFs=").append(tableCFsMap.toString());
-102}
-103return builder.toString();
+025import java.util.Set;
+026import java.util.TreeMap;
+027
+028import 
org.apache.hadoop.hbase.TableName;
+029import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+030import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+031import 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
+032import 
org.apache.hadoop.hbase.util.Bytes;
+033
+034/**
+035 * A configuration for the replication 
peer cluster.
+036 */
+037@InterfaceAudience.Public
+038@InterfaceStability.Evolving
+039public class ReplicationPeerConfig {
+040
+041  private String clusterKey;
+042  private String 
replicationEndpointImpl;
+043  private final Mapbyte[], byte[] 
peerData;
+044  private final MapString, String 
configuration;
+045  private MapTableName, ? extends 
CollectionString tableCFsMap = null;
+046  private SetString namespaces = 
null;
+047
+048  public ReplicationPeerConfig() {
+049this.peerData = new 
TreeMapbyte[], byte[](Bytes.BYTES_COMPARATOR);
+050this.configuration = new 
HashMapString, String(0);
+051  }
+052
+053  /**
+054   * Set the clusterKey which is the 
concatenation of the slave cluster's:
+055   *  
hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
+056   */
+057  public ReplicationPeerConfig 
setClusterKey(String clusterKey) {
+058this.clusterKey = clusterKey;
+059return this;
+060  }
+061
+062  /**
+063   * Sets the ReplicationEndpoint plugin 
class for this peer.
+064   
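
A sketch of building a peer config with the new namespaces field (the cluster key and namespace are illustrative, and the setNamespaces setter is assumed to accompany the field added above):

import java.util.Collections;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

public class PeerConfigExample {
  static ReplicationPeerConfig buildPeer() {
    // clusterKey format: quorum:clientPort:znodeParent
    ReplicationPeerConfig peerConfig = new ReplicationPeerConfig()
        .setClusterKey("zk1,zk2,zk3:2181:/hbase");
    // Replicate every table in namespace "ns1" to this peer.
    peerConfig.setNamespaces(Collections.singleton("ns1"));
    return peerConfig;
  }
}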

[24/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/exceptions/ScannerResetException.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/exceptions/ScannerResetException.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/exceptions/ScannerResetException.html
new file mode 100644
index 000..04cc6bc
--- /dev/null
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/exceptions/ScannerResetException.html
@@ -0,0 +1,122 @@
+Source code
+001/**
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018
+019package 
org.apache.hadoop.hbase.exceptions;
+020
+021import 
org.apache.hadoop.hbase.DoNotRetryIOException;
+022import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+023import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+024
+025/**
+026 * Thrown when the server side has 
received an Exception, and asks the Client to reset the scanner
+027 * state by closing the current region 
scanner, and reopening from the start of last seen row.
+028 */
+029@InterfaceAudience.Public
+030@InterfaceStability.Stable
+031public class ScannerResetException 
extends DoNotRetryIOException {
+032  private static final long 
serialVersionUID = -5649728171144849619L;
+033
+034  /** constructor */
+035  public ScannerResetException() {
+036super();
+037  }
+038
+039  /**
+040   * Constructor
+041   * @param s message
+042   */
+043  public ScannerResetException(String s) 
{
+044super(s);
+045  }
+046
+047  public ScannerResetException(String s, 
Exception e) {
+048super(s, e);
+049  }
+050}
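
Because ScannerResetException extends DoNotRetryIOException, the RPC layer will not retry it; the scan is restarted instead. A hedged sketch of what such a restart loop can look like at the application level (HBase's own ClientScanner performs a similar reset internally; the row tracking and Table handle here are illustrative):

import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.exceptions.ScannerResetException;

public class ScanWithReset {
  static void scanAll(Table table) throws IOException {
    byte[] resumeRow = null;
    while (true) {
      Scan scan = new Scan();
      if (resumeRow != null) {
        // Per the javadoc, reopen from the start of the last seen row;
        // that row is re-read, so processing should be idempotent.
        scan.setStartRow(resumeRow);
      }
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result r : scanner) {
          resumeRow = r.getRow();
          // process r ...
        }
        return; // scan completed cleanly
      } catch (ScannerResetException e) {
        // scanner closed by try-with-resources; loop reopens it
      }
    }
  }
}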

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
index 5d13aba..d416cfa 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.html
@@ -33,7 +33,7 @@
 025
 026import org.apache.hadoop.hbase.Cell;
 027import 
org.apache.hadoop.hbase.CellComparator;
-028import 
org.apache.hadoop.hbase.KeyValueUtil;
+028import 
org.apache.hadoop.hbase.CellUtil;
 029import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 030import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 031import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -183,7 +183,7 @@
 175  return null;
 176}
 177byte[] nextRowKey = 
tracker.nextRow();
-178return 
KeyValueUtil.createFirstOnRow(nextRowKey);
+178return 
CellUtil.createFirstOnRow(nextRowKey, 0, (short) nextRowKey.length);
 179  }
 180
 181  /**
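
The hunk above swaps KeyValueUtil.createFirstOnRow for CellUtil.createFirstOnRow when building the next-row seek hint. A minimal sketch of the new call, as it appears in the diff (the row value is illustrative):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class FirstOnRowExample {
  // Builds the lowest-sorting cell on a row, usable as a scanner seek hint.
  static Cell hintFor(String row) {
    byte[] nextRowKey = Bytes.toBytes(row);
    return CellUtil.createFirstOnRow(nextRowKey, 0, (short) nextRowKey.length);
  }
}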

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
index 5dfe4d3..24a0d22 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/KeyOnlyFilter.html
@@ -28,117 +28,381 @@
 020
 021
 022import java.io.IOException;
-023import java.util.ArrayList;
-024
-025import org.apache.hadoop.hbase.Cell;
-026import 
org.apache.hadoop.hbase.KeyValue;
-027import 
org.apache.hadoop.hbase.KeyValueUtil;
-028import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-029import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-030import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-031import 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-032import 
org.apache.hadoop.hbase.util.Bytes;
-033
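
For context on the KeyOnlyFilter rework in this hunk, a short usage sketch (scan setup illustrative):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;

public class KeyOnlyScan {
  static Scan keysOnly() {
    // Values are stripped server-side; pass lenAsVal=true to replace each
    // value with its original length instead of an empty byte array.
    return new Scan().setFilter(new KeyOnlyFilter(false));
  }
}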

[25/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
index a0f0e21..b4f5eb1 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
@@ -197,445 +197,481 @@
 189   * @param peerConfig configuration for 
the replication slave cluster
 190   */
 191  public void addPeer(String id, 
ReplicationPeerConfig peerConfig) throws ReplicationException {
-192
this.replicationPeers.registerPeer(id, peerConfig);
-193  }
-194
-195  /**
-196   *  @deprecated as release of 2.0.0, 
and it will be removed in 3.0.0
-197   * */
-198  @Deprecated
-199  public static Map<TableName, List<String>> parseTableCFsFromConfig(String tableCFsConfig) {
-200    return ReplicationSerDeHelper.parseTableCFsFromConfig(tableCFsConfig);
-201  }
-202
-203  public void updatePeerConfig(String id, 
ReplicationPeerConfig peerConfig)
-204  throws ReplicationException {
-205
this.replicationPeers.updatePeerConfig(id, peerConfig);
-206  }
-207  /**
-208   * Removes a peer cluster and stops the 
replication to it.
-209   * @param id a short name that 
identifies the cluster
-210   */
-211  public void removePeer(String id) 
throws ReplicationException {
-212
this.replicationPeers.unregisterPeer(id);
-213  }
-214
-215  /**
-216   * Restart the replication stream to 
the specified peer.
-217   * @param id a short name that 
identifies the cluster
-218   */
-219  public void enablePeer(String id) 
throws ReplicationException {
-220
this.replicationPeers.enablePeer(id);
-221  }
-222
-223  /**
-224   * Stop the replication stream to the 
specified peer.
-225   * @param id a short name that 
identifies the cluster
-226   */
-227  public void disablePeer(String id) 
throws ReplicationException {
-228
this.replicationPeers.disablePeer(id);
-229  }
-230
-231  /**
-232   * Get the number of slave clusters the 
local cluster has.
-233   * @return number of slave clusters
-234   */
-235  public int getPeersCount() {
-236return 
this.replicationPeers.getAllPeerIds().size();
-237  }
-238
-239  public Map<String, ReplicationPeerConfig> listPeerConfigs() {
-240    return this.replicationPeers.getAllPeerConfigs();
-241  }
-242
-243  public ReplicationPeerConfig 
getPeerConfig(String id) throws ReplicationException {
-244return 
this.replicationPeers.getReplicationPeerConfig(id);
-245  }
-246
-247  /**
-248   * Get the replicable table-cf config 
of the specified peer.
-249   * @param id a short name that 
identifies the cluster
-250   * @deprecated as release of 2.0.0, and 
it will be removed in 3.0.0,
-251   * use {@link #getPeerConfig(String)} 
instead.
-252   * */
-253  @Deprecated
-254  public String getPeerTableCFs(String 
id) throws ReplicationException {
-255return 
ReplicationSerDeHelper.convertToString(this.replicationPeers.getPeerTableCFsConfig(id));
-256  }
-257
-258  /**
-259   * Append the replicable table-cf 
config of the specified peer
-260   * @param id a short that identifies 
the cluster
-261   * @param tableCfs table-cfs config 
str
-262   * @throws ReplicationException
-263   * @deprecated as release of 2.0.0, and 
it will be removed in 3.0.0,
-264   * use {@link 
#appendPeerTableCFs(String, Map)} instead.
-265   */
-266  @Deprecated
-267  public void appendPeerTableCFs(String 
id, String tableCfs) throws ReplicationException {
-268appendPeerTableCFs(id, 
ReplicationSerDeHelper.parseTableCFsFromConfig(tableCfs));
-269  }
-270
-271  /**
-272   * Append the replicable table-cf 
config of the specified peer
-273   * @param id a short that identifies 
the cluster
-274   * @param tableCfs A map from tableName 
to column family names
-275   * @throws ReplicationException
-276   */
-277  public void appendPeerTableCFs(String id, Map<TableName, ? extends Collection<String>> tableCfs)
-278      throws ReplicationException {
-279    if (tableCfs == null) {
-280      throw new ReplicationException("tableCfs is null");
-281    }
-282    Map<TableName, List<String>> preTableCfs = this.replicationPeers.getPeerTableCFsConfig(id);
-283if (preTableCfs == null) {
-284  setPeerTableCFs(id, tableCfs);
-285  return;
+192
checkNamespacesAndTableCfsConfigConflict(peerConfig.getNamespaces(),
+193  peerConfig.getTableCFsMap());
+194
this.replicationPeers.registerPeer(id, peerConfig);
+195  }
+196
+197  /**
+198   *  @deprecated as release of 2.0.0, 
and it will be removed in 3.0.0
+199   * */
+200  @Deprecated
+201  public static Map<TableName, List<String>> parseTableCFsFromConfig(String tableCFsConfig) {
+202    return
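
The addPeer path above now validates that a peer's namespaces and table-cfs settings do not overlap before registering it. A hedged sketch of the call (peer id and cluster key are illustrative):

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

public class AddPeerExample {
  public static void main(String[] args) throws Exception {
    try (ReplicationAdmin admin =
        new ReplicationAdmin(HBaseConfiguration.create())) {
      ReplicationPeerConfig peerConfig = new ReplicationPeerConfig()
          .setClusterKey("zk1:2181:/hbase");
      // Throws ReplicationException if namespaces and table-cfs conflict.
      admin.addPeer("1", peerConfig);
    }
  }
}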

[32/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index d4666b5..e626707 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
@@ -124,80 +124,88 @@
 
 
 
-byte[]
-OrderedBlob.decode(PositionedByteRange src)
+Short
+RawShort.decode(PositionedByteRange src)


-Integer
-OrderedInt32.decode(PositionedByteRange src)
+Object[]
+Struct.decode(PositionedByteRange src)


-String
-RawString.decode(PositionedByteRange src)
+T
+TerminatedWrapper.decode(PositionedByteRange src)


-Long
-OrderedInt64.decode(PositionedByteRange src)
+Byte
+OrderedInt8.decode(PositionedByteRange src)


-Double
-RawDouble.decode(PositionedByteRange src)
+String
+OrderedString.decode(PositionedByteRange src)


-Integer
-RawInteger.decode(PositionedByteRange src)
-
-
 Double
 OrderedFloat64.decode(PositionedByteRange src)

-
-Float
-RawFloat.decode(PositionedByteRange src)
-

-T
-FixedLengthWrapper.decode(PositionedByteRange src)
+Integer
+OrderedInt32.decode(PositionedByteRange src)


-String
-OrderedString.decode(PositionedByteRange src)
-
-
 Number
 OrderedNumeric.decode(PositionedByteRange src)

+
+Byte
+RawByte.decode(PositionedByteRange src)
+

-T
-TerminatedWrapper.decode(PositionedByteRange src)
+Float
+RawFloat.decode(PositionedByteRange src)


 Float
 OrderedFloat32.decode(PositionedByteRange src)


-byte[]
-RawBytes.decode(PositionedByteRange src)
+Integer
+RawInteger.decode(PositionedByteRange src)


+byte[]
+OrderedBlob.decode(PositionedByteRange src)
+
+
 Long
-RawLong.decode(PositionedByteRange src)
+OrderedInt64.decode(PositionedByteRange src)
+
+
+T
+FixedLengthWrapper.decode(PositionedByteRange src)


-Short
-RawShort.decode(PositionedByteRange src)
+String
+RawString.decode(PositionedByteRange src)


 byte[]
+RawBytes.decode(PositionedByteRange src)
+
+
+byte[]
 OrderedBlobVar.decode(PositionedByteRange src)

+
+Short
+OrderedInt16.decode(PositionedByteRange src)
+
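
The listing above pairs each DataType with its decode result; a minimal round-trip sketch with one of them (the buffer size of 5 matches OrderedInt32's one marker byte plus four value bytes):

import org.apache.hadoop.hbase.types.OrderedInt32;
import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

public class DataTypeExample {
  public static void main(String[] args) {
    PositionedByteRange buf = new SimplePositionedMutableByteRange(5);
    OrderedInt32.ASC.encode(buf, 42);
    buf.setPosition(0); // rewind before decoding
    Integer out = OrderedInt32.ASC.decode(buf);
    System.out.println(out); // 42
  }
}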

[04/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html 
b/devapidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html
index de594cf..2bce902 100644
--- a/devapidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html
+++ b/devapidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html
@@ -127,7 +127,7 @@
 
 
 Direct Known Subclasses:
-AccessDeniedException, ConstraintException, CoprocessorException, CorruptHFileException, DoNotRetryRegionException, FailedSanityCheckException, FatalConnectionException, HBaseSnapshotException, InvalidFamilyOperationException, InvalidLabelException, InvalidQuotaSettingsException, LabelAlreadyExistsException, LeaseException, LockTimeoutException, NamespaceExistException, NamespaceNotFoundException, NoSuchColumnFamilyException, NotAllMetaRegionsOnlineException, OperationConflictException, OutOfOrderScannerNextException, QuotaExceededException, ScannerTimeoutException, ServerTooBusyException, TableExistsException, TableNotDisabledException, TableNotEnabledException, TableNotFoundException, UnknownProtocolException, UnknownScannerException
+AccessDeniedException, ConstraintException, CoprocessorException, CorruptHFileException, DoNotRetryRegionException, FailedSanityCheckException, FatalConnectionException, HBaseSnapshotException, InvalidFamilyOperationException, InvalidLabelException, InvalidQuotaSettingsException, LabelAlreadyExistsException, LeaseException, LockTimeoutException, NamespaceExistException, NamespaceNotFoundException, NoSuchColumnFamilyException, NotAllMetaRegionsOnlineException, OperationConflictException, OutOfOrderScannerNextException, QuotaExceededException, ScannerResetException, ScannerTimeoutException, ServerTooBusyException, TableExistsException, TableNotDisabledException, TableNotEnabledException, TableNotFoundException, UnknownProtocolException, UnknownScannerException
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/DroppedSnapshotException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/DroppedSnapshotException.html 
b/devapidocs/org/apache/hadoop/hbase/DroppedSnapshotException.html
index 044f9e3..94d18c6 100644
--- a/devapidocs/org/apache/hadoop/hbase/DroppedSnapshotException.html
+++ b/devapidocs/org/apache/hadoop/hbase/DroppedSnapshotException.html
@@ -44,7 +44,7 @@
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -286,7 +286,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/IOException.ht
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/ExtendedCell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ExtendedCell.html 
b/devapidocs/org/apache/hadoop/hbase/ExtendedCell.html
new file mode 100644
index 000..73e06fd
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/ExtendedCell.html
@@ -0,0 +1,311 @@
+ExtendedCell (Apache HBase 2.0.0-SNAPSHOT API)
+
+org.apache.hadoop.hbase
+Interface ExtendedCell
+
+
+
+
+
+
+All Superinterfaces:
+Cell, Cloneable, HeapSize, SettableSequenceId, SettableTimestamp
+
+
+All Known Implementing Classes:
+BufferedDataBlockEncoder.OffheapDecodedCell, BufferedDataBlockEncoder.OnheapDecodedCell, CellUtil.ShareableMemoryTagRewriteCell, CellUtil.TagRewriteCell, KeyValue, KeyValue.KeyOnlyKeyValue, KeyValueCodec.ByteBufferedKeyValueDecoder.ShareableMemoryKeyValue,
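
Since ExtendedCell folds the deprecated SettableSequenceId and SettableTimestamp into one server-side cell contract, a hedged sketch of code written against it (the method body is illustrative):

import java.io.IOException;
import org.apache.hadoop.hbase.ExtendedCell;

public class CellTouch {
  // Mutates the write-time metadata that ExtendedCell exposes on top of Cell.
  static void touch(ExtendedCell cell, long now) throws IOException {
    cell.setTimestamp(now);
    cell.setSequenceId(0L);
  }
}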
 

[14/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 63b1466..b0c63ff 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -280,10 +280,10 @@
 Warnings
 Errors
 
-1845
+1850
 0
 0
-11576
+11585
 
 Files
 
@@ -331,7 +331,7 @@
 org/apache/hadoop/hbase/CellUtil.java
 0
 0
-97
+96
 
 org/apache/hadoop/hbase/ChoreService.java
 0
@@ -383,147 +383,147 @@
 0
 1
 
+org/apache/hadoop/hbase/ExtendedCell.java
+0
+0
+1
+
 org/apache/hadoop/hbase/HBaseConfiguration.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/HColumnDescriptor.java
 0
 0
 27
-
+
 org/apache/hadoop/hbase/HConstants.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/HRegionInfo.java
 0
 0
 58
-
+
 org/apache/hadoop/hbase/HRegionLocation.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/HTableDescriptor.java
 0
 0
 46
-
+
 org/apache/hadoop/hbase/HealthChecker.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/JMXListener.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/KeyValue.java
 0
 0
 135
-
+
 org/apache/hadoop/hbase/KeyValueTestUtil.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/KeyValueUtil.java
 0
 0
 30
-
+
 org/apache/hadoop/hbase/LocalHBaseCluster.java
 0
 0
 23
-
+
 org/apache/hadoop/hbase/MetaMutationAnnotation.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/MetaTableAccessor.java
 0
 0
 116
-
+
 org/apache/hadoop/hbase/NamespaceDescriptor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ProcedureUtil.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/RegionLoad.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/RegionLocations.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/RegionStateListener.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/ScheduledChore.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/ServerLoad.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ServerName.java
 0
 0
 34
-
+
 org/apache/hadoop/hbase/SettableSequenceId.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/SettableTimestamp.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/SplitLogCounters.java
 0
 0
 1
-
-org/apache/hadoop/hbase/SplitLogTask.java
-0
-0
-2
 
-org/apache/hadoop/hbase/Streamable.java
+org/apache/hadoop/hbase/SplitLogTask.java
 0
 0
 2
@@ -626,3047 +626,3057 @@
 org/apache/hadoop/hbase/client/AsyncProcess.java
 0
 0
-29
+10
 
-org/apache/hadoop/hbase/client/BufferedMutator.java
+org/apache/hadoop/hbase/client/AsyncRequestFuture.java
 0
 0
 1
 
+org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
+0
+0
+24
+
+org/apache/hadoop/hbase/client/BatchErrors.java
+0
+0
+1
+
+org/apache/hadoop/hbase/client/BufferedMutator.java
+0
+0
+1
+
 org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/CancellableRegionServerCallable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ClientIdGenerator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientScanner.java
 0
 0
-206
-
+205
+
 org/apache/hadoop/hbase/client/ClientSimpleScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientSmallReversedScanner.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/ClientSmallScanner.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/ClusterStatusListener.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/CompactType.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ConnectionConfiguration.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ConnectionImplementation.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/CoprocessorHConnection.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/DelayingRunner.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/Delete.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/Get.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/HBaseAdmin.java
 0
 0
 68
-
+
 org/apache/hadoop/hbase/client/HRegionLocator.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/HTable.java
 0
 0
 25
-
+
 org/apache/hadoop/hbase/client/HTableInterface.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/HTableMultiplexer.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/HTableWrapper.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/Increment.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MasterCallable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MetaCache.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/MetricsConnection.java
 0
 0
 44
-
+
 org/apache/hadoop/hbase/client/MultiAction.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/MultiResponse.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/MultiServerCallable.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/client/Mutation.java
 0
 0
 14
-
+
 org/apache/hadoop/hbase/client/Operation.java
 0
 0
 1
-
+
 

[09/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index a067f67..bba004e 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -689,6 +689,8 @@
 
 The user should override this method, and try to take a 
lock if necessary.
 
+acquireLock(Procedure)
 - Method in class org.apache.hadoop.hbase.procedure2.ProcedureExecutor
+
 acquireLock(K)
 - Method in class org.apache.hadoop.hbase.util.KeyLocker
 
 Return a lock for the given key.
@@ -728,7 +730,7 @@
 
 ACTION_BY_CODE
 - Static variable in class org.apache.hadoop.hbase.security.access.Permission
 
-actions
 - Variable in class org.apache.hadoop.hbase.client.AsyncProcess.BatchErrors
+actions
 - Variable in class org.apache.hadoop.hbase.client.BatchErrors
 
 actions
 - Variable in class org.apache.hadoop.hbase.client.DelayingRunner
 
@@ -738,7 +740,7 @@
 
 actions
 - Variable in class org.apache.hadoop.hbase.security.access.Permission
 
-actionsInProgress
 - Variable in class org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFutureImpl
+actionsInProgress
 - Variable in class org.apache.hadoop.hbase.client.AsyncRequestFutureImpl
 
 activateOptions()
 - Method in class org.apache.hadoop.hbase.AsyncConsoleAppender
 
@@ -792,7 +794,7 @@
 
 Add column and value to this Append operation.
 
-add(Throwable,
 Row, ServerName) - Method in class 
org.apache.hadoop.hbase.client.AsyncProcess.BatchErrors
+add(Throwable,
 Row, ServerName) - Method in class 
org.apache.hadoop.hbase.client.BatchErrors
 
 add(BigDecimal,
 BigDecimal) - Method in class 
org.apache.hadoop.hbase.client.coprocessor.BigDecimalColumnInterpreter
 
@@ -1011,9 +1013,13 @@
 add(T)
 - Method in class org.apache.hadoop.hbase.util.ConcatenatedLists
 
 add(long) 
- Method in class org.apache.hadoop.hbase.util.Counter
-
+
+Deprecated.
+
 add(long)
 - Method in class org.apache.hadoop.hbase.util.Counter.Cell
-
+
+Deprecated.
+
 add(long,
 long) - Method in class org.apache.hadoop.hbase.util.FastLongHistogram
 
 Adds a value to the histogram.
@@ -1934,11 +1940,11 @@
 
 Complete taking the snapshot on the region.
 
-addReplicaActions(int,
 MapServerName, MultiActionRow, 
ListActionRow) - Method in class 
org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable
+addReplicaActions(int,
 MapServerName, MultiActionRow, 
ListActionRow) - Method in class 
org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable
 
 Add replica actions to action map by server.
 
-addReplicaActionsAgain(ActionRow,
 MapServerName, MultiActionRow) - Method in class 
org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable
+addReplicaActionsAgain(ActionRow,
 MapServerName, MultiActionRow) - Method in class 
org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable
 
 addReplicas(MasterProcedureEnv,
 HTableDescriptor, ListHRegionInfo) - Static method in class 
org.apache.hadoop.hbase.master.procedure.CreateTableProcedure
 
@@ -1955,7 +1961,7 @@
 
 ADDRESS_BITS_PER_WORD
 - Static variable in class org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker.BitSetNode
 
-addresses
 - Variable in class org.apache.hadoop.hbase.client.AsyncProcess.BatchErrors
+addresses
 - Variable in class org.apache.hadoop.hbase.client.BatchErrors
 
 Addressing 
- Class in org.apache.hadoop.hbase.util
 
@@ -2051,7 +2057,7 @@
 
 addShutdownHook(Thread,
 int) - Method in class org.apache.hadoop.hbase.util.ShutdownHookManager.ShutdownHookManagerV2
 
-addSingleServerRequestHeapSize(ServerName, AsyncProcess.AsyncRequestFutureImpl<CResult>.SingleServerRequestRunnable) - Method in class org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFutureImpl
+addSingleServerRequestHeapSize(ServerName, AsyncRequestFutureImpl<CResult>.SingleServerRequestRunnable) - Method in class org.apache.hadoop.hbase.client.AsyncRequestFutureImpl
 
 addSize(RpcCallContext,
 Result, Object) - Method in class 
org.apache.hadoop.hbase.regionserver.RSRpcServices
 
@@ -2343,6 +2349,11 @@
 
 afterLast
 - Variable in class org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArrayScanner
 
+afterReplay(TEnvironment)
 - Method in class org.apache.hadoop.hbase.procedure2.Procedure
+
+Called when the procedure is ready to be added to the queue 
after
+ the loading/replay operation.
+
 ageAtEviction
 - Variable in class org.apache.hadoop.hbase.io.hfile.CacheStats
 
 Keep running age at eviction time
@@ -2530,6 +2541,8 @@
 
 alreadyRunning
 - Variable in class org.apache.hadoop.hbase.master.CatalogJanitor
 
+ALWAYS_COPY_FILES
 - Static variable in class org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles
+
 amd64 - Static 
variable in class org.apache.hadoop.hbase.util.JVM
 
 ampBytes
 - Static variable in class org.apache.hadoop.hbase.http.HtmlQuoting
@@ -3190,10 

[10/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/deprecated-list.html
--
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index 539b40f..afdfb4b 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
@@ -97,6 +97,16 @@
 use Table instead
 
 
+
+org.apache.hadoop.hbase.SettableSequenceId
+as of 2.0 and will be 
removed in 3.0. Use ExtendedCell 
instead
+
+
+
+org.apache.hadoop.hbase.SettableTimestamp
+as of 2.0 and will be 
removed in 3.0. Use ExtendedCell 
instead
+
+
 
 
 
@@ -113,11 +123,16 @@
 
 
 
+org.apache.hadoop.hbase.util.Counter
+use java.util.concurrent.atomic.LongAdder instead.
+
+
+
 org.apache.hadoop.hbase.filter.FirstKeyValueMatchingQualifiersFilter
 Deprecated in 2.0. See 
HBASE-13347
 
 
-
+
 org.apache.hadoop.hbase.security.access.HbaseObjectWritableFor96Migration
 This class is needed 
migrating TablePermissions written with
  Writables.  It is needed to read old permissions written pre-0.96.  This
@@ -125,53 +140,53 @@
  will have been migrated and written with protobufs.
 
 
-
+
 org.apache.hadoop.hbase.mapreduce.HLogInputFormat
 use WALInputFormat.  Remove in 
hadoop 3.0
 
 
-
+
 org.apache.hadoop.hbase.regionserver.wal.HLogKey
 use WALKey. Deprecated as 
of 1.0 (HBASE-12522). Remove in 2.0
 
 
-
+
 org.apache.hadoop.hbase.regionserver.wal.HLogPrettyPrinter
 use the "hbase wal" 
command
 
 
-
+
 org.apache.hadoop.hbase.KeyValue.KVComparator
 : Use CellComparator.
 
 
-
+
 org.apache.hadoop.hbase.KeyValue.MetaComparator
 : CellComparator.META_COMPARATOR
 to be used
 
 
-
+
 org.apache.hadoop.hbase.KeyValue.RawBytesComparator
 Not to be used for any 
comparsions
 
 
-
+
 org.apache.hadoop.hbase.regionserver.wal.KeyValueCompression
 
-
+
 org.apache.hadoop.hbase.regionserver.querymatcher.LegacyScanQueryMatcher
 
-
+
 org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
 
 
-
+
 org.apache.hadoop.hbase.zookeeper.ZKLeaderManager
 Not used
 
 
-
+
 org.apache.hadoop.hbase.zookeeper.ZKUtil.NodeAndData
 Unused
 
@@ -413,13 +428,13 @@
 org.apache.hadoop.hbase.mapreduce.CellCreator.create(byte[],
 int, int, byte[], int, int, byte[], int, int, long, byte[], int, int, 
String)
 
 
-org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.FifoRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
 org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
-org.apache.hadoop.hbase.regionserver.FifoRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
 org.apache.hadoop.hbase.coprocessor.ObserverContext.createAndPrepare(T,
 ObserverContextT)
@@ -447,13 +462,13 @@
 org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValues(ListCell)
 
 
-org.apache.hadoop.hbase.regionserver.SplitTransaction.execute(Server,
 RegionServerServices)
-use #execute(Server, 
RegionServerServices, User);  as of 1.0.2, remove in 3.0
+org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.execute(Server,
 RegionServerServices)
+use #execute(Server, 
RegionServerServices, User)
 
 
 
-org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.execute(Server,
 RegionServerServices)
-use #execute(Server, 
RegionServerServices, User)
+org.apache.hadoop.hbase.regionserver.SplitTransaction.execute(Server,
 RegionServerServices)
+use #execute(Server, 
RegionServerServices, User);  as of 1.0.2, remove in 3.0
 
 
 
@@ -465,15 +480,15 @@
 org.apache.hadoop.hbase.rest.client.RemoteHTable.exists(ListGet)
 
 
-org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
+org.apache.hadoop.hbase.filter.FilterBase.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
- Instead use Filter.filterRowKey(Cell)
+ Instead use FilterBase.filterRowKey(Cell)
 
 
 
-org.apache.hadoop.hbase.filter.FilterBase.filterRowKey(byte[],
 int, int)
+org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
- Instead use FilterBase.filterRowKey(Cell)
+ Instead use Filter.filterRowKey(Cell)
 
 
 
@@ -570,10 +585,10 @@
 
 
 
-org.apache.hadoop.hbase.http.HttpServer.getPort()
+org.apache.hadoop.hbase.http.InfoServer.getPort()
 
 
-org.apache.hadoop.hbase.http.InfoServer.getPort()
+org.apache.hadoop.hbase.http.HttpServer.getPort()
 
 
 org.apache.hadoop.hbase.CellUtil.getQualifierBufferShallowCopy(Cell)
@@ -780,15 +795,15 @@
 
 
 

[37/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 22047b3..145917a 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -107,45 +107,43 @@
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterColumn(Cell cell)
+MultipleColumnPrefixFilter.filterColumn(Cell cell)
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterColumn(Cell cell)
+ColumnPrefixFilter.filterColumn(Cell cell)
 
 
-abstract Filter.ReturnCode
-Filter.filterKeyValue(Cell v)
-A way to filter based on the column family, column qualifier and/or the column value.
-
+Filter.ReturnCode
+InclusiveStopFilter.filterKeyValue(Cell v)
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cell cell)
+RandomRowFilter.filterKeyValue(Cell v)
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterKeyValue(Cell v)
+DependentColumnFilter.filterKeyValue(Cell c)
 
 
 Filter.ReturnCode
-PrefixFilter.filterKeyValue(Cell v)
+FirstKeyOnlyFilter.filterKeyValue(Cell v)
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cell v)
+KeyOnlyFilter.filterKeyValue(Cell ignored)
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cell v)
+SingleColumnValueFilter.filterKeyValue(Cell c)
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterKeyValue(Cell v)
+ColumnPaginationFilter.filterKeyValue(Cell v)
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cell kv)
+PageFilter.filterKeyValue(Cell ignored)
 
 
 Filter.ReturnCode
@@ -153,69 +151,71 @@
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cell v)
+QualifierFilter.filterKeyValue(Cell v)
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cell v)
+FamilyFilter.filterKeyValue(Cell v)
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cell ignored)
+WhileMatchFilter.filterKeyValue(Cell v)
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cell c)
+ValueFilter.filterKeyValue(Cell v)
 
 
-Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cell c)
+abstract Filter.ReturnCode
+Filter.filterKeyValue(Cell v)
+A way to filter based on the column family, column qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cell v)
+MultipleColumnPrefixFilter.filterKeyValue(Cell kv)
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cell kv)
+TimestampsFilter.filterKeyValue(Cell v)
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cell v)
+FuzzyRowFilter.filterKeyValue(Cell c)
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterKeyValue(Cell c)
+MultiRowRangeFilter.filterKeyValue(Cell ignored)
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cell v)
+SkipFilter.filterKeyValue(Cell v)
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cell v)
+ColumnPrefixFilter.filterKeyValue(Cell cell)
 
 
 Filter.ReturnCode
-QualifierFilter.filterKeyValue(Cell v)
+RowFilter.filterKeyValue(Cell v)
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cell v)
+PrefixFilter.filterKeyValue(Cell v)
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterKeyValue(Cell ignored)
-
-
-Filter.ReturnCode
 FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cell v)
 Deprecated.
 
 
+
+Filter.ReturnCode
+ColumnRangeFilter.filterKeyValue(Cell kv)
+
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cell ignored)
+ColumnCountGetFilter.filterKeyValue(Cell v)
 
 
 static Filter.ReturnCode

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 9d8a596..a41faf9 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -140,11 +140,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Filter
-Scan.getFilter()
+Query.getFilter()
 
 
 Filter
-Query.getFilter()
+Scan.getFilter()
 
 
 
@@ -156,19 +156,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+Query
+Query.setFilter(Filter filter)
+Apply the specified server-side filter when performing the Query.
+
+
+
 Get
 Get.setFilter(Filter filter)
 
-
+
 Scan
 Scan.setFilter(Filter filter)
 
-
-Query
-Query.setFilter(Filter filter)
-Apply the specified server-side filter when performing the Query.
-
-
 
 
 
@@ -390,59 +390,59 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
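
The table above reshuffles the per-filter filterKeyValue(Cell) entries; functionally, a scan hands each candidate cell to the configured filter, and the ReturnCode (INCLUDE, SKIP, NEXT_ROW, and so on) it returns drives the region scanner. A minimal usage sketch, assuming a stock ValueFilter (the "on" value is a placeholder):

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.BinaryComparator;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.ValueFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class FilterReturnCodes {
      public static Scan buildScan() {
        // Every cell the scan touches goes through the filter's
        // filterKeyValue(Cell); only cells whose value equals "on" survive.
        Scan scan = new Scan();
        scan.setFilter(new ValueFilter(CompareOp.EQUAL,
            new BinaryComparator(Bytes.toBytes("on"))));
        return scan;
      }
    }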

[22/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
index 908b8b7..9e44e11 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
@@ -123,1019 +123,1128 @@
 115  private static final String 
ASSIGN_SEQ_IDS = "hbase.mapreduce.bulkload.assign.sequenceNumbers";
 116  public final static String 
CREATE_TABLE_CONF_KEY = "create.table";
 117  public final static String 
SILENCE_CONF_KEY = "ignore.unmatched.families";
-118
-119  // We use a '.' prefix which is ignored 
when walking directory trees
-120  // above. It is invalid family name.
-121  final static String TMP_DIR = ".tmp";
-122
-123  private int 
maxFilesPerRegionPerFamily;
-124  private boolean assignSeqIds;
-125  private Set<String> unmatchedFamilies = new HashSet<String>();
-126
-127  // Source filesystem
-128  private FileSystem fs;
-129  // Source delegation token
-130  private FsDelegationToken 
fsDelegationToken;
-131  private String bulkToken;
-132  private UserProvider userProvider;
-133  private int nrThreads;
-134  private RpcControllerFactory 
rpcControllerFactory;
-135
-136  private LoadIncrementalHFiles() {}
-137
-138  public 
LoadIncrementalHFiles(Configuration conf) throws Exception {
-139super(conf);
-140this.rpcControllerFactory = new 
RpcControllerFactory(conf);
-141initialize();
-142  }
-143
-144  private void initialize() throws 
Exception {
-145if (initalized) {
-146  return;
-147}
-148// make a copy, just to be sure we're 
not overriding someone else's config
-149
setConf(HBaseConfiguration.create(getConf()));
-150Configuration conf = getConf();
-151// disable blockcache for tool 
invocation, see HBASE-10500
-152
conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0);
-153this.userProvider = 
UserProvider.instantiate(conf);
-154this.fsDelegationToken = new 
FsDelegationToken(userProvider, "renewer");
-155assignSeqIds = 
conf.getBoolean(ASSIGN_SEQ_IDS, true);
-156maxFilesPerRegionPerFamily = 
conf.getInt(MAX_FILES_PER_REGION_PER_FAMILY, 32);
-157nrThreads = 
conf.getInt("hbase.loadincremental.threads.max",
-158  
Runtime.getRuntime().availableProcessors());
-159initalized = true;
-160  }
-161
-162  private void usage() {
-163System.err.println("usage: " + NAME + 
" /path/to/hfileoutputformat-output tablename" + "\n -D"
-164+ CREATE_TABLE_CONF_KEY + "=no - 
can be used to avoid creation of table by this tool\n"
-165+ "  Note: if you set this to 
'no', then the target table must already exist in HBase\n -D"
-166+ SILENCE_CONF_KEY + "=yes - can 
be used to ignore unmatched column families\n"
-167+ "\n");
-168  }
-169
-170  private interface BulkHFileVisitor<TFamily> {
-171    TFamily bulkFamily(final byte[] familyName)
-172      throws IOException;
-173    void bulkHFile(final TFamily family, final FileStatus hfileStatus)
-174      throws IOException;
-175  }
-176
-177  /**
-178   * Iterate over the bulkDir hfiles.
-179   * Skip reference, HFileLink, files 
starting with "_" and non-valid hfiles.
-180   */
-181  private static <TFamily> void visitBulkHFiles(final FileSystem fs, final Path bulkDir,
-182    final BulkHFileVisitor<TFamily> visitor) throws IOException {
-183visitBulkHFiles(fs, bulkDir, visitor, 
true);
-184  }
-185
-186  /**
-187   * Iterate over the bulkDir hfiles.
-188   * Skip reference, HFileLink, files 
starting with "_".
-189   * Check and skip non-valid hfiles by 
default, or skip this validation by setting
-190   * 
'hbase.loadincremental.validate.hfile' to false.
-191   */
-192  private static <TFamily> void visitBulkHFiles(final FileSystem fs, final Path bulkDir,
-193    final BulkHFileVisitor<TFamily> visitor, final boolean validateHFile) throws IOException {
-194if (!fs.exists(bulkDir)) {
-195  throw new 
FileNotFoundException("Bulkload dir " + bulkDir + " not found");
-196}
-197
-198FileStatus[] familyDirStatuses = 
fs.listStatus(bulkDir);
-199if (familyDirStatuses == null) {
-200  throw new FileNotFoundException("No 
families found in " + bulkDir);
-201}
-202
-203for (FileStatus familyStat : 
familyDirStatuses) {
-204  if (!familyStat.isDirectory()) {
-205LOG.warn("Skipping non-directory 
" + familyStat.getPath());
-206continue;
-207  }
-208  Path familyDir = 
familyStat.getPath();
-209  byte[] familyName = 
familyDir.getName().getBytes();
-210  // Skip invalid family
-211  try {
-212
HColumnDescriptor.isLegalFamilyName(familyName);
-213  }
-214  catch 

[48/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apache_hbase_reference_guide.pdfmarks
--
diff --git a/apache_hbase_reference_guide.pdfmarks 
b/apache_hbase_reference_guide.pdfmarks
index 583aacc..5ef666b 100644
--- a/apache_hbase_reference_guide.pdfmarks
+++ b/apache_hbase_reference_guide.pdfmarks
@@ -2,8 +2,8 @@
   /Author (Apache HBase Team)
   /Subject ()
   /Keywords ()
-  /ModDate (D:20160916144607)
-  /CreationDate (D:20160916144607)
+  /ModDate (D:20160929151030)
+  /CreationDate (D:20160929151030)
   /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
   /Producer ()
   /DOCINFO pdfmark

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/allclasses-frame.html
--
diff --git a/apidocs/allclasses-frame.html b/apidocs/allclasses-frame.html
index 14631ff..8c9dd06 100644
--- a/apidocs/allclasses-frame.html
+++ b/apidocs/allclasses-frame.html
@@ -278,6 +278,7 @@
 RpcRetryingCaller
 RSGroupInfo
 Scan
+ScannerResetException
 ScannerTimeoutException
 ScheduledChore
 SecurityCapability

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/allclasses-noframe.html
--
diff --git a/apidocs/allclasses-noframe.html b/apidocs/allclasses-noframe.html
index 0597dcd..b40ba1f 100644
--- a/apidocs/allclasses-noframe.html
+++ b/apidocs/allclasses-noframe.html
@@ -278,6 +278,7 @@
 RpcRetryingCaller
 RSGroupInfo
 Scan
+ScannerResetException
 ScannerTimeoutException
 ScheduledChore
 SecurityCapability



[28/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
index d607296..e22025b 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html
@@ -57,616 +57,615 @@
 049import 
org.apache.hadoop.hbase.TableName;
 050import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 051import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-052import 
org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFuture;
-053import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-054import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-055
-056/**
-057 * HTableMultiplexer provides a thread-safe non-blocking PUT API across all the tables.
-058 * Each put will be sharded into 
different buffer queues based on its destination region server.
-059 * So each region server buffer queue 
will only have the puts which share the same destination.
-060 * And each queue will have a flush 
worker thread to flush the puts request to the region server.
-061 * If any queue is full, the 
HTableMultiplexer starts to drop the Put requests for that
-062 * particular queue.
-063 *
-064 * Also all the puts will be retried a configurable number of times before dropping.
-065 * And the HTableMultiplexer can report 
the number of buffered requests and the number of the
-066 * failed (dropped) requests in total or 
on per region server basis.
-067 *
-068 * This class is thread safe.
-069 */
-070@InterfaceAudience.Public
-071@InterfaceStability.Evolving
-072public class HTableMultiplexer {
-073  private static final Log LOG = 
LogFactory.getLog(HTableMultiplexer.class.getName());
-074
-075  public static final String 
TABLE_MULTIPLEXER_FLUSH_PERIOD_MS =
-076  
"hbase.tablemultiplexer.flush.period.ms";
-077  public static final String 
TABLE_MULTIPLEXER_INIT_THREADS = "hbase.tablemultiplexer.init.threads";
-078  public static final String 
TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE =
-079  
"hbase.client.max.retries.in.queue";
-080
-081  /** The map between each region server 
to its flush worker */
-082  private final Map<HRegionLocation, FlushWorker> serverToFlushWorkerMap =
-083      new ConcurrentHashMap<>();
-084
-085  private final Configuration 
workerConf;
-086  private final ClusterConnection conn;
-087  private final ExecutorService pool;
-088  private final int maxAttempts;
-089  private final int 
perRegionServerBufferQueueSize;
-090  private final int maxKeyValueSize;
-091  private final ScheduledExecutorService 
executor;
-092  private final long flushPeriod;
-093
-094  /**
-095   * @param conf The HBaseConfiguration
-096   * @param 
perRegionServerBufferQueueSize determines the max number of the buffered Put 
ops for
-097   *  each region server before 
dropping the request.
-098   */
-099  public HTableMultiplexer(Configuration 
conf, int perRegionServerBufferQueueSize)
-100  throws IOException {
-101
this(ConnectionFactory.createConnection(conf), conf, 
perRegionServerBufferQueueSize);
-102  }
-103
-104  /**
-105   * @param conn The HBase connection.
-106   * @param conf The HBase 
configuration
-107   * @param 
perRegionServerBufferQueueSize determines the max number of the buffered Put 
ops for
-108   *  each region server before 
dropping the request.
-109   */
-110  public HTableMultiplexer(Connection 
conn, Configuration conf,
-111  int perRegionServerBufferQueueSize) 
{
-112this.conn = (ClusterConnection) 
conn;
-113this.pool = 
HTable.getDefaultExecutor(conf);
-114// how many times we could try in 
total, one more than retry number
-115this.maxAttempts = 
conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
-116
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER) + 1;
-117this.perRegionServerBufferQueueSize = 
perRegionServerBufferQueueSize;
-118this.maxKeyValueSize = 
HTable.getMaxKeyValueSize(conf);
-119this.flushPeriod = 
conf.getLong(TABLE_MULTIPLEXER_FLUSH_PERIOD_MS, 100);
-120int initThreads = 
conf.getInt(TABLE_MULTIPLEXER_INIT_THREADS, 10);
-121this.executor =
-122
Executors.newScheduledThreadPool(initThreads,
-123  new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("HTableFlushWorker-%d").build());
-124
-125this.workerConf = 
HBaseConfiguration.create(conf);
-126// We do not do the retry because we 
need to reassign puts to different queues if regions are
-127// moved.
-128
this.workerConf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 0);
-129  }
-130
-131  /**

[45/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/CellUtil.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/CellUtil.html 
b/apidocs/org/apache/hadoop/hbase/CellUtil.html
index 435e068..e2a2306 100644
--- a/apidocs/org/apache/hadoop/hbase/CellUtil.html
+++ b/apidocs/org/apache/hadoop/hbase/CellUtil.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":41,"i47":9,"i48":9,"i49":9,"i50":9,"i51":9,"i52":9,"i53":9,"i54":9,"i55":9,"i56":9,"i57":9,"i58":9,"i59":9,"i60":9,"i61":9,"i62":9,"i63":9,"i64":9,"i65":9,"i66":9,"i67":9,"i68":9,"i69":9,"i70":9,"i71":9,"i72":9,"i73":9,"i74":41,"i75":9,"i76":9,"i77":9,"i78":9,"i79":9,"i80":9,"i81":9,"i82":9,"i83":9,"i84":9,"i85":41,"i86":9,"i87":9,"i88":9,"i89":9,"i90":9,"i91":9,"i92":9,"i93":9,"i94":9,"i95":9,"i96":9,"i97":9,"i98":9};
+var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":9,"i47":9,"i48":9,"i49":9,"i50":41,"i51":9,"i52":9,"i53":9,"i54":9,"i55":9,"i56":9,"i57":9,"i58":9,"i59":9,"i60":9,"i61":9,"i62":9,"i63":9,"i64":9,"i65":9,"i66":9,"i67":9,"i68":9,"i69":9,"i70":9,"i71":9,"i72":9,"i73":9,"i74":9,"i75":9,"i76":9,"i77":9,"i78":41,"i79":9,"i80":9,"i81":9,"i82":9,"i83":9,"i84":9,"i85":9,"i86":9,"i87":9,"i88":9,"i89":41,"i90":9,"i91":9,"i92":9,"i93":9,"i94":9,"i95":9,"i96":9,"i97":9,"i98":9,"i99":9,"i100":9,"i101":9,"i102":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public final class CellUtil
+public final class CellUtil
 extends Object
 Utility methods helpful slinging Cell instances.
  Some methods below are for internal use only and are marked 
InterfaceAudience.Private at the
@@ -295,23 +295,45 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static Cell
+createFirstOnRow(byte[] row)
+
+
+static Cell
+createFirstOnRow(byte[] row,
+byte[] family,
+byte[] col)
+
+
+static Cell
 createFirstOnRow(byte[] row,
 int roffset,
 short rlength)
 
+static Cell
+createFirstOnRow(byte[] row,
+int roffset,
+short rlength,
+byte[] family,
+int foffset,
+byte flength,
+byte[] col,
+int coffset,
+int clength)
+
+
 static Cell
 createFirstOnRow(Cell cell)
 Create a Cell that is smaller than all other possible Cells for the given Cell's row.
 
 
 static Cell
 createFirstOnRowCol(Cell cell)
 Create a Cell that is smaller than all other possible Cells for the given Cell's row.
 
 
 static Cell
 createFirstOnRowCol(Cell cell,
byte[] qArray,
@@ -321,85 +343,89 @@
  passed qualifier.
 
 
 static Cell
 createFirstOnRowColTS(Cell cell,
  long ts)
 Creates the first cell with the row/family/qualifier of this cell and the given timestamp.
 
 
+static Cell
+createLastOnRow(byte[] row)
+
+
 static Cell
 createLastOnRow(Cell cell)
 Create a Cell that is larger than all other possible Cells for the given Cell's row.
 
 
 static Cell
 createLastOnRowCol(Cell cell)
 Create a Cell that is larger than all other possible Cells for the given Cell's rk:cf:q.
 
 
 static boolean
 equals(Cell a,
   Cell b)
 equals
 
 
 static boolean
 equalsIgnoreMvccVersion(Cell a,
 Cell b)
 special case for Cell.equals
 
 
 static long
 estimatedHeapSizeOf(Cell cell)
 This is an estimate of the heap space occupied by a cell.
 
 
 static int
 estimatedSerializedSizeOf(Cell cell)
 Estimate based on keyvalue's serialization format.
 
 
 static int
 estimatedSerializedSizeOfKey(Cell cell)
 Calculates the serialized key size.
 
 
 static ByteRange
 fillFamilyRange(Cell cell,
 ByteRange range)
 
 static ByteRange

[16/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html
index 01ae315..3cb1f0b 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html
@@ -34,188 +34,190 @@
 026
 027/**
 028 * High scalable counter. Thread safe.
-029 */
-030@InterfaceAudience.Public
-031@InterfaceStability.Evolving
-032public class Counter {
-033  private static final int 
MAX_CELLS_LENGTH = 1  20;
-034
-035  private static class Cell {
-036// Pads are added around the value to 
avoid cache-line contention with
-037// another cell's value. The 
cache-line size is expected to be equal to or
-038// less than about 128 Bytes (= 64 
Bits * 16).
-039
-040@SuppressWarnings("unused")
-041volatile long p0, p1, p2, p3, p4, p5, 
p6;
-042volatile long value;
-043@SuppressWarnings("unused")
-044volatile long q0, q1, q2, q3, q4, q5, 
q6;
-045
-046    static final AtomicLongFieldUpdater<Cell> valueUpdater =
-047        AtomicLongFieldUpdater.newUpdater(Cell.class, "value");
-048
-049Cell() {}
+029 * @deprecated use {@link 
java.util.concurrent.atomic.LongAdder} instead.
+030 */
+031@InterfaceAudience.Public
+032@InterfaceStability.Evolving
+033@Deprecated
+034public class Counter {
+035  private static final int MAX_CELLS_LENGTH = 1 << 20;
+036
+037  private static class Cell {
+038// Pads are added around the value to 
avoid cache-line contention with
+039// another cell's value. The 
cache-line size is expected to be equal to or
+040// less than about 128 Bytes (= 64 
Bits * 16).
+041
+042@SuppressWarnings("unused")
+043volatile long p0, p1, p2, p3, p4, p5, 
p6;
+044volatile long value;
+045@SuppressWarnings("unused")
+046volatile long q0, q1, q2, q3, q4, q5, 
q6;
+047
+048    static final AtomicLongFieldUpdater<Cell> valueUpdater =
+049        AtomicLongFieldUpdater.newUpdater(Cell.class, "value");
 050
-051Cell(long initValue) {
-052  value = initValue;
-053}
-054
-055long get() {
-056  return value;
-057}
-058
-059boolean add(long delta) {
-060  long current = value;
-061  return 
valueUpdater.compareAndSet(this, current, current + delta);
-062}
-063  }
-064
-065  private static class Container {
-066/** The length should be a power of 
2. */
-067final Cell[] cells;
-068
-069/** True if a new extended container 
is going to replace this. */
-070final AtomicBoolean demoted = new 
AtomicBoolean();
-071
-072Container(Cell cell) {
-073  this(new Cell[] { cell });
-074}
-075
-076/**
-077 * @param cells the length should be 
a power of 2
-078 */
-079Container(Cell[] cells) {
-080  this.cells = cells;
-081}
-082  }
-083
-084  private final AtomicReference<Container> containerRef;
+051Cell() {}
+052
+053Cell(long initValue) {
+054  value = initValue;
+055}
+056
+057long get() {
+058  return value;
+059}
+060
+061boolean add(long delta) {
+062  long current = value;
+063  return 
valueUpdater.compareAndSet(this, current, current + delta);
+064}
+065  }
+066
+067  private static class Container {
+068/** The length should be a power of 
2. */
+069final Cell[] cells;
+070
+071/** True if a new extended container 
is going to replace this. */
+072final AtomicBoolean demoted = new 
AtomicBoolean();
+073
+074Container(Cell cell) {
+075  this(new Cell[] { cell });
+076}
+077
+078/**
+079 * @param cells the length should be 
a power of 2
+080 */
+081Container(Cell[] cells) {
+082  this.cells = cells;
+083}
+084  }
 085
-086  public Counter() {
-087this(new Cell());
-088  }
-089
-090  public Counter(long initValue) {
-091this(new Cell(initValue));
-092  }
-093
-094  private Counter(Cell initCell) {
-095    containerRef = new AtomicReference<Container>(new Container(initCell));
-096  }
-097
-098  private static int hash() {
-099// The logic is borrowed from 
high-scale-lib's ConcurrentAutoTable.
-100
-101int h = 
System.identityHashCode(Thread.currentThread());
-102// You would think that 
System.identityHashCode on the current thread
-103// would be a good hash fcn, but 
actually on SunOS 5.8 it is pretty lousy
-104// in the low bits.
-105
-106    h ^= (h >>> 20) ^ (h >>> 12); // Bit spreader, borrowed from Doug Lea
-107    h ^= (h >>> 7) ^ (h >>> 4);
-108return h;
-109  }
-110
-111  private static class IndexHolder {
-112int index = hash();
-113  }
-114
-115  private final ThreadLocal<IndexHolder> indexHolderThreadLocal =
-116      new ThreadLocal<IndexHolder>() {
-117        @Override
-118        protected IndexHolder initialValue() {
-119  
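
A minimal sketch of the migration the new @deprecated tag recommends, from Counter to java.util.concurrent.atomic.LongAdder, which applies the same striped-cell idea with JDK support (the RequestMetrics wrapper is hypothetical):

    import java.util.concurrent.atomic.LongAdder;

    public class RequestMetrics {
      private final LongAdder requests = new LongAdder();

      public void onRequest() {
        requests.increment();   // was: counter.increment()
      }

      public long snapshot() {
        return requests.sum();  // was: counter.get()
      }
    }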

[36/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/jetty/package-summary.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/jetty/package-summary.html 
b/apidocs/org/apache/hadoop/hbase/jetty/package-summary.html
new file mode 100644
index 000..0fbedc5
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/jetty/package-summary.html
@@ -0,0 +1,124 @@
+[generated Javadoc page boilerplate elided: DOCTYPE, navigation links, the "Package org.apache.hadoop.hbase.jetty" heading for the new (still empty) package, and the copyright footer]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/jetty/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/jetty/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/jetty/package-tree.html
new file mode 100644
index 000..d324d1a
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/jetty/package-tree.html
@@ -0,0 +1,128 @@
+[generated Javadoc page boilerplate elided: navigation links, the "Hierarchy For Package org.apache.hadoop.hbase.jetty" heading with an empty hierarchy, and the copyright footer]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/jetty/package-use.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/jetty/package-use.html 
b/apidocs/org/apache/hadoop/hbase/jetty/package-use.html
new file mode 100644
index 000..5be6266
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/jetty/package-use.html
@@ -0,0 +1,125 @@
+[generated Javadoc page boilerplate elided: navigation links, the "Uses of Package org.apache.hadoop.hbase.jetty" heading, and the copyright footer]
[43/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/org/apache/hadoop/hbase/HConstants.html
index e29802c..0454504 100644
--- a/apidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/org/apache/hadoop/hbase/HConstants.html
@@ -163,24 +163,30 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static String
+BULKLOAD_STAGING_DIR_NAME
+Staging dir used by bulk load
+
+
+
+static String
 BYTES_PER_CHECKSUM
 The name of the configuration parameter that specifies
  the number of bytes in a newly created checksum chunk.
 
 
 static byte[]
 CATALOG_FAMILY
 The catalog family
 
 
 static String
 CATALOG_FAMILY_STR
 The catalog family as a string
 
 
 static String
 CHECKSUM_TYPE_NAME
 The name of the configuration parameter that specifies
@@ -188,668 +194,668 @@
  for newly created blocks.
 
 
 static String
 CIPHER_AES
 Default cipher for encryption
 
 
 static String
 CLIENT_PORT_STR
 The ZK client port key in the ZK properties map.
 
 
 static String
 CLUSTER_DISTRIBUTED
 Cluster is in distributed mode or not
 
 
 static String
 CLUSTER_ID_DEFAULT
 Default value for cluster ID
 
 
 static String
 CLUSTER_ID_FILE_NAME
 name of the file for unique cluster ID
 
 
 static boolean
 CLUSTER_IS_DISTRIBUTED
 Cluster is fully-distributed
 
 
 static boolean
 CLUSTER_IS_LOCAL
 Cluster is standalone or pseudo-distributed
 
 
 static String
 COMPACTION_KV_MAX
 Parameter name for the maximum batch of KVs to be used in flushes and compactions
 
 
 static int
 COMPACTION_KV_MAX_DEFAULT
 
 static String
 CONFIGURATION
 
 static String
 CORRUPT_DIR_NAME
 
 static String
 CP_HTD_ATTR_INCLUSION_KEY
 
 static Pattern
 CP_HTD_ATTR_KEY_PATTERN
 
 static String
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
 
 static Pattern
 CP_HTD_ATTR_VALUE_PARAM_PATTERN
 
 static String
 CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
 
 static Pattern
 CP_HTD_ATTR_VALUE_PATTERN
 
  Pattern that matches a coprocessor specification.
 
 
 static String
 CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY
 Configuration key for the name of the alternate cipher algorithm for the cluster, a string
 
 
 static String
 CRYPTO_CIPHERPROVIDER_CONF_KEY
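
Many of the String entries above are the canonical keys for hbase-site.xml settings. A hypothetical probe of two of them (ConfProbe is a made-up name, and the false fallback for the distributed flag is an assumption):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;

    public class ConfProbe {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Keys resolve against hbase-site.xml; defaults are compiled in.
        boolean distributed = conf.getBoolean(HConstants.CLUSTER_DISTRIBUTED, false);
        int kvMax = conf.getInt(HConstants.COMPACTION_KV_MAX,
            HConstants.COMPACTION_KV_MAX_DEFAULT);
        System.out.println("distributed=" + distributed + " compaction.kv.max=" + kvMax);
      }
    }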
 

[38/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html 
b/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
index 0b4060d..d3618d5 100644
--- a/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.html
@@ -522,7 +522,7 @@ public void
 parseTableCFsFromConfig
 @Deprecated
-public static Map<TableName, List<String>> parseTableCFsFromConfig(String tableCFsConfig)
+public static Map<TableName, List<String>> parseTableCFsFromConfig(String tableCFsConfig)
 Deprecated. As of release 2.0.0; it will be removed in 3.0.0.
 
 
@@ -532,7 +532,7 @@
 
 
 updatePeerConfig
-public void updatePeerConfig(String id,
+public void updatePeerConfig(String id,
                              ReplicationPeerConfig peerConfig)
                       throws ReplicationException
 
@@ -547,7 +547,7 @@
 
 
 removePeer
-public void removePeer(String id)
+public void removePeer(String id)
              throws ReplicationException
 Removes a peer cluster and stops the replication to it.
 
@@ -564,7 +564,7 @@
 
 
 enablePeer
-public void enablePeer(String id)
+public void enablePeer(String id)
              throws ReplicationException
 Restart the replication stream to the specified peer.
 
@@ -581,7 +581,7 @@
 
 
 disablePeer
-public void disablePeer(String id)
+public void disablePeer(String id)
               throws ReplicationException
 Stop the replication stream to the specified peer.
 
@@ -598,7 +598,7 @@
 
 
 getPeersCount
-public int getPeersCount()
+public int getPeersCount()
 Get the number of slave clusters the local cluster has.
 
 Returns:
@@ -612,7 +612,7 @@
 
 
 listPeerConfigs
-public Map<String, ReplicationPeerConfig> listPeerConfigs()
+public Map<String, ReplicationPeerConfig> listPeerConfigs()
 
 
 
@@ -621,7 +621,7 @@ 
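
A hypothetical peer lifecycle built from the methods documented above (the peer id "1" and the ZooKeeper cluster key are placeholders; addPeer and close() are assumed from the same 2.0-era ReplicationAdmin API):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
    import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

    public class PeerSetup {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (ReplicationAdmin admin = new ReplicationAdmin(conf)) {
          ReplicationPeerConfig peer = new ReplicationPeerConfig();
          peer.setClusterKey("zk1,zk2,zk3:2181:/hbase");
          admin.addPeer("1", peer, null); // id, config, per-table CF map
          admin.disablePeer("1");         // pause shipping edits
          admin.enablePeer("1");          // resume
          System.out.println("peers: " + admin.getPeersCount());
          admin.removePeer("1");
        }
      }
    }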

[40/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html 
b/apidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
index dbc9edd..d0a7402 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
@@ -348,6 +348,13 @@
 
 
 class
+ScannerResetException
+Thrown when the server side has received an Exception and asks the client to reset the scanner
+ state by closing the current region scanner and reopening from the start of the last seen row.
+
+
+
+class
 UnknownProtocolException
 An error requesting an RPC protocol that the server is not 
serving.
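
The HBase client retries ScannerResetException internally; a hypothetical explicit handler would reopen from the last row seen, as the new class's description says (ResilientScan is a made-up name, and the restarted scan re-reads the last seen row):

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.exceptions.ScannerResetException;

    public class ResilientScan {
      public static void scanAll(Connection conn, TableName name) throws IOException {
        byte[] startRow = null;
        while (true) {
          Scan scan = (startRow == null) ? new Scan() : new Scan(startRow);
          try (Table table = conn.getTable(name);
               ResultScanner scanner = table.getScanner(scan)) {
            for (Result r : scanner) {
              startRow = r.getRow(); // remember progress
            }
            return; // scan completed
          } catch (ScannerResetException e) {
            // reopen from the start of the last seen row (it is re-read)
          }
        }
      }
    }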
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 53abc29..fe99dcf 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -359,13 +359,13 @@
 
 
 long
-ExponentialClientBackoffPolicy.getBackoffTime(ServerName serverName,
+ClientBackoffPolicy.getBackoffTime(ServerName serverName,
   byte[] region,
   org.apache.hadoop.hbase.client.backoff.ServerStatistics stats)
 
 
 long
-ClientBackoffPolicy.getBackoffTime(ServerName serverName,
+ExponentialClientBackoffPolicy.getBackoffTime(ServerName serverName,
   byte[] region,
   org.apache.hadoop.hbase.client.backoff.ServerStatistics stats)
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
index addd970..c9878ce 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -399,14 +399,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 TableName
-Table.getName()
-Gets the fully qualified table name instance of this 
table.
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
 
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+Table.getName()
+Gets the fully qualified table name instance of this 
table.
 
 
 
@@ -1055,11 +1055,18 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 
+int
+LoadIncrementalHFiles.run(String dirPath,
+   Map<byte[], List<org.apache.hadoop.fs.Path>> map,
+   TableName tableName)
+
+
 protected List<org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem>
-LoadIncrementalHFiles.tryAtomicRegionLoad(Connection conn,
+LoadIncrementalHFiles.tryAtomicRegionLoad(Connection conn,
    TableName tableName,
    byte[] first,
-   Collection<org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem> lqis)
+   Collection<org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem> lqis,
+   boolean copyFile)
 Attempts to do an atomic load of many hfiles into a region.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
b/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
index 1bbaf83..fecd7b1 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
@@ -133,6 

[30/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html 
b/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
index 7234f1e..a78e2b5 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/CellUtil.html
@@ -31,362 +31,362 @@
 023
 024import java.io.DataOutputStream;
 025import java.io.IOException;
-026import java.math.BigDecimal;
-027import java.nio.ByteBuffer;
-028import java.util.ArrayList;
-029import java.util.Arrays;
-030import java.util.Iterator;
-031import java.util.List;
-032import java.util.Map.Entry;
-033import java.util.NavigableMap;
-034
-035import 
org.apache.hadoop.hbase.KeyValue.Type;
-036import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-037import 
org.apache.hadoop.hbase.classification.InterfaceAudience.Private;
-038import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-039import 
org.apache.hadoop.hbase.io.HeapSize;
-040import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-041import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-042import 
org.apache.hadoop.hbase.util.ByteRange;
-043import 
org.apache.hadoop.hbase.util.Bytes;
-044import 
org.apache.hadoop.hbase.util.ClassSize;
-045
-046/**
-047 * Utility methods helpful slinging 
{@link Cell} instances.
-048 * Some methods below are for internal 
use only and are marked InterfaceAudience.Private at the
-049 * method level.
-050 */
-051@InterfaceAudience.Public
-052@InterfaceStability.Evolving
-053public final class CellUtil {
-054
-055  /**
-056   * Private constructor to keep this 
class from being instantiated.
-057   */
-058  private CellUtil(){}
-059
-060  /*** ByteRange 
***/
-061
-062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-064  }
-065
-066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-067return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-068  }
-069
-070  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-071return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-072  cell.getQualifierLength());
-073  }
-074
-075  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-076return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-077  }
-078
-079  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-080return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-081  }
-082
-083  /* get individual 
arrays for tests /
-084
-085  public static byte[] cloneRow(Cell 
cell){
-086byte[] output = new 
byte[cell.getRowLength()];
-087copyRowTo(cell, output, 0);
-088return output;
-089  }
-090
-091  public static byte[] cloneFamily(Cell 
cell){
-092byte[] output = new 
byte[cell.getFamilyLength()];
-093copyFamilyTo(cell, output, 0);
-094return output;
-095  }
-096
-097  public static byte[] 
cloneQualifier(Cell cell){
-098byte[] output = new 
byte[cell.getQualifierLength()];
-099copyQualifierTo(cell, output, 0);
-100return output;
-101  }
-102
-103  public static byte[] cloneValue(Cell 
cell){
-104byte[] output = new 
byte[cell.getValueLength()];
-105copyValueTo(cell, output, 0);
-106return output;
-107  }
-108
-109  public static byte[] cloneTags(Cell 
cell) {
-110byte[] output = new 
byte[cell.getTagsLength()];
-111copyTagTo(cell, output, 0);
-112return output;
-113  }
-114
-115  /**
-116   * Returns tag value in a new byte 
array. If server-side, use
-117   * {@link Tag#getValueArray()} with 
appropriate {@link Tag#getValueOffset()} and
-118   * {@link Tag#getValueLength()} instead 
to save on allocations.
-119   * @param cell
-120   * @return tag value in a new byte 
array.
-121   */
-122  public static byte[] getTagArray(Cell 
cell){
-123byte[] output = new 
byte[cell.getTagsLength()];
-124copyTagTo(cell, output, 0);
-125return output;
-126  }
-127
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Arrays;
+031import java.util.Iterator;
+032import java.util.List;
+033import java.util.Map.Entry;
+034import java.util.NavigableMap;
+035
+036import 
org.apache.hadoop.hbase.KeyValue.Type;
+037import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+038import 
org.apache.hadoop.hbase.classification.InterfaceAudience.Private;
+039import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+040import 
org.apache.hadoop.hbase.io.HeapSize;
+041import 
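
The fill*Range helpers shown above point a reusable ByteRange at a cell's bytes in place, while the clone* helpers copy them out. A small sketch (RowBytes is a made-up name, and SimpleByteRange is assumed as the concrete ByteRange implementation):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.ByteRange;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.SimpleByteRange;

    public class RowBytes {
      public static void main(String[] args) {
        Cell cell = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
            Bytes.toBytes("q"), Bytes.toBytes("v"));
        byte[] copy = CellUtil.cloneRow(cell); // allocates a new array
        ByteRange range = CellUtil.fillRowRange(cell, new SimpleByteRange()); // no copy
        System.out.println(Bytes.toString(copy) + " / length=" + range.getLength());
      }
    }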

[52/52] hbase-site git commit: Empty commit

2016-09-29 Thread dimaspivak
Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/a16440ac
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/a16440ac
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/a16440ac

Branch: refs/heads/asf-site
Commit: a16440acb475281df7f185c6cf4c920c88b05963
Parents: 044b337
Author: Dima Spivak 
Authored: Thu Sep 29 16:08:37 2016 -0400
Committer: Dima Spivak 
Committed: Thu Sep 29 16:08:37 2016 -0400

--

--




[49/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index bd7f8a5..068b048 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,24 +5,24 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
 /Producer (Apache HBase Team)
-/CreationDate (D:20160916144419+00'00')
-/ModDate (D:20160916144419+00'00')
+/CreationDate (D:20160929150855+00'00')
+/ModDate (D:20160929150855+00'00')
 >>
 endobj
 2 0 obj
 << /Type /Catalog
 /Pages 3 0 R
 /Names 25 0 R
-/Outlines 4013 0 R
-/PageLabels 4217 0 R
+/Outlines 4009 0 R
+/PageLabels 4213 0 R
 /PageMode /UseOutlines
 /ViewerPreferences [/FitWindow]
 >>
 endobj
 3 0 obj
 << /Type /Pages
-/Count 668
-/Kids [7 0 R 13 0 R 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 39 0 R 43 0 R 47 0 R 58 
0 R 62 0 R 64 0 R 66 0 R 68 0 R 75 0 R 78 0 R 80 0 R 85 0 R 88 0 R 90 0 R 92 0 
R 101 0 R 107 0 R 112 0 R 114 0 R 135 0 R 141 0 R 148 0 R 150 0 R 154 0 R 157 0 
R 167 0 R 174 0 R 190 0 R 194 0 R 198 0 R 200 0 R 204 0 R 210 0 R 212 0 R 214 0 
R 216 0 R 218 0 R 221 0 R 227 0 R 229 0 R 232 0 R 234 0 R 236 0 R 238 0 R 240 0 
R 242 0 R 246 0 R 249 0 R 252 0 R 254 0 R 256 0 R 258 0 R 260 0 R 262 0 R 264 0 
R 270 0 R 273 0 R 275 0 R 277 0 R 279 0 R 284 0 R 288 0 R 291 0 R 296 0 R 299 0 
R 303 0 R 318 0 R 328 0 R 334 0 R 345 0 R 355 0 R 360 0 R 362 0 R 364 0 R 375 0 
R 380 0 R 384 0 R 389 0 R 393 0 R 404 0 R 416 0 R 431 0 R 437 0 R 439 0 R 441 0 
R 448 0 R 459 0 R 470 0 R 481 0 R 484 0 R 487 0 R 491 0 R 495 0 R 498 0 R 501 0 
R 503 0 R 506 0 R 510 0 R 512 0 R 516 0 R 520 0 R 526 0 R 530 0 R 532 0 R 538 0 
R 540 0 R 544 0 R 552 0 R 554 0 R 557 0 R 560 0 R 563 0 R 566 0 R 581 0 R 588 0 
R 595 0 R 606 0 R 613 0 R 622 0 R 630 0
  R 633 0 R 637 0 R 640 0 R 652 0 R 660 0 R 666 0 R 671 0 R 675 0 R 677 0 R 691 
0 R 703 0 R 709 0 R 715 0 R 718 0 R 726 0 R 734 0 R 739 0 R 744 0 R 750 0 R 752 
0 R 754 0 R 756 0 R 764 0 R 773 0 R 777 0 R 784 0 R 792 0 R 798 0 R 802 0 R 809 
0 R 813 0 R 818 0 R 826 0 R 828 0 R 832 0 R 843 0 R 848 0 R 850 0 R 853 0 R 857 
0 R 863 0 R 866 0 R 878 0 R 882 0 R 887 0 R 895 0 R 900 0 R 904 0 R 908 0 R 910 
0 R 913 0 R 915 0 R 919 0 R 921 0 R 924 0 R 929 0 R 933 0 R 938 0 R 942 0 R 945 
0 R 947 0 R 954 0 R 958 0 R 963 0 R 976 0 R 980 0 R 984 0 R 989 0 R 991 0 R 
1000 0 R 1003 0 R 1008 0 R 1011 0 R 1020 0 R 1023 0 R 1029 0 R 1036 0 R 1039 0 
R 1041 0 R 1050 0 R 1052 0 R 1054 0 R 1057 0 R 1059 0 R 1061 0 R 1063 0 R 1065 
0 R 1067 0 R 1070 0 R 1073 0 R 1078 0 R 1081 0 R 1083 0 R 1085 0 R 1087 0 R 
1092 0 R 1101 0 R 1104 0 R 1106 0 R 1108 0 R 1113 0 R 1115 0 R 1118 0 R 1120 0 
R 1122 0 R 1124 0 R 1127 0 R 1132 0 R 1138 0 R 1145 0 R 1150 0 R 1164 0 R 1175 
0 R 1179 0 R 1192 0 R 1201 0 R 1217 0 R 1221 0 R 1
 231 0 R 1244 0 R 1247 0 R 1259 0 R 1268 0 R 1276 0 R 1280 0 R 1289 0 R 1294 0 
R 1298 0 R 1304 0 R 1310 0 R 1317 0 R 1325 0 R 1327 0 R 1339 0 R 1341 0 R 1346 
0 R 1350 0 R 1355 0 R 1365 0 R 1371 0 R 1377 0 R 1379 0 R 1381 0 R 1393 0 R 
1400 0 R 1409 0 R 1415 0 R 1429 0 R 1437 0 R 1441 0 R 1450 0 R 1458 0 R 1466 0 
R 1472 0 R 1476 0 R 1479 0 R 1481 0 R 1490 0 R 1493 0 R 1500 0 R 1504 0 R 1507 
0 R 1515 0 R 1519 0 R 1522 0 R 1524 0 R 1533 0 R 1540 0 R 1546 0 R 1551 0 R 
1555 0 R 1558 0 R 1564 0 R 1569 0 R 1574 0 R 1576 0 R 1578 0 R 1581 0 R 1583 0 
R 1592 0 R 1595 0 R 1601 0 R 1608 0 R 1612 0 R 1618 0 R 1621 0 R 1623 0 R 1628 
0 R 1631 0 R 1633 0 R 1635 0 R 1637 0 R 1644 0 R 1654 0 R 1656 0 R 1658 0 R 
1660 0 R 1662 0 R 1666 0 R 1668 0 R 1670 0 R 1672 0 R 1675 0 R 1677 0 R 1679 0 
R 1681 0 R 1685 0 R 1689 0 R 1698 0 R 1700 0 R 1702 0 R 1704 0 R 1706 0 R 1712 
0 R 1714 0 R 1719 0 R 1721 0 R 1723 0 R 1730 0 R 1735 0 R 1739 0 R 1743 0 R 
1746 0 R 1749 0 R 1754 0 R 1756 0 R 1759 0 R 1761 0 R 1763 0 R
  1765 0 R 1769 0 R 1771 0 R 1775 0 R 1777 0 R 1779 0 R 1781 0 R 1783 0 R 1790 
0 R 1793 0 R 1798 0 R 1800 0 R 1802 0 R 1804 0 R 1806 0 R 1814 0 R 1824 0 R 
1827 0 R 1843 0 R 1858 0 R 1862 0 R 1867 0 R 1871 0 R 1874 0 R 1879 0 R 1881 0 
R 1888 0 R 1890 0 R 1893 0 R 1895 0 R 1897 0 R 1899 0 R 1901 0 R 1905 0 R 1907 
0 R 1915 0 R 1923 0 R 1929 0 R 1940 0 R 1954 0 R 1966 0 R 1985 0 R 1987 0 R 
1989 0 R 1993 0 R 2010 0 R 2018 0 R 2025 0 R 2034 0 R 2038 0 R 2048 0 R 2059 0 
R 2065 0 R 2074 0 R 2087 0 R 2104 0 R 2114 0 R 2117 0 R 2126 0 R 2141 0 R 2148 
0 R 2151 0 R 2156 0 R 2161 0 R 2171 0 R 2179 0 R 2182 0 R 2184 0 R 2188 0 R 
2201 0 R 2209 0 R 2215 0 R 2219 0 R  0 R 2224 0 R 2226 0 R 2228 0 R 2230 0 
R 2235 0 R 2237 0 R 2247 0 R 2257 0 R 2264 0 R 2276 0 R 2281 0 R 2285 0 R 2297 
0 R 2304 0 R 2310 0 R 2312 0 R 2323 0 R 2330 0 R 2341 0 R 2345 0 R 2354 0 R 
2361 0 R 2371 0 R 2379 0 R 2388 0 R 2394 0 R 2399 0 R 2404 0 R 2407 0 R 2409 0 
R 2415 0 R 2419 0 R 2423 0 R 2429 0 R 2436 0 R 2441 0 R 2445 0
  R 2454 0 R 2459 0 R 2464 0 R 2477 0 R 

[05/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/CellUtil.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/CellUtil.html 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.html
index 0a09cb8..f2a8a69 100644
--- a/devapidocs/org/apache/hadoop/hbase/CellUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":9,"i47":9,"i48":9,"i49":41,"i50":9,"i51":9,"i52":9,"i53":9,"i54":9,"i55":9,"i56":9,"i57":9,"i58":9,"i59":9,"i60":9,"i61":9,"i62":9,"i63":9,"i64":9,"i65":9,"i66":9,"i67":9,"i68":9,"i69":9,"i70":9,"i71":9,"i72":9,"i73":9,"i74":9,"i75":9,"i76":9,"i77":9,"i78":9,"i79":9,"i80":9,"i81":41,"i82":9,"i83":9,"i84":9,"i85":9,"i86":9,"i87":9,"i88":9,"i89":9,"i90":9,"i91":9,"i92":9,"i93":41,"i94":9,"i95":9,"i96":9,"i97":9,"i98":9,"i99":9,"i100":9,"i101":9,"i102":9,"i103":9,"i104":9,"i105":9,"i106":9,"i107":9};
+var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":9,"i47":9,"i48":9,"i49":9,"i50":9,"i51":9,"i52":9,"i53":41,"i54":9,"i55":9,"i56":9,"i57":9,"i58":9,"i59":9,"i60":9,"i61":9,"i62":9,"i63":9,"i64":9,"i65":9,"i66":9,"i67":9,"i68":9,"i69":9,"i70":9,"i71":9,"i72":9,"i73":9,"i74":9,"i75":9,"i76":9,"i77":9,"i78":9,"i79":9,"i80":9,"i81":9,"i82":9,"i83":9,"i84":9,"i85":41,"i86":9,"i87":9,"i88":9,"i89":9,"i90":9,"i91":9,"i92":9,"i93":9,"i94":9,"i95":9,"i96":9,"i97":41,"i98":9,"i99":9,"i100":9,"i101":9,"i102":9,"i103":9,"i104":9,"i105":9,"i106":9,"i107":9,"i108":9,"i109":9,"i110":9,"i111":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public final class CellUtil
+public final class CellUtil
 extends Object
 Utility methods helpful slinging Cell instances.
  Some methods below are for internal use only and are marked 
InterfaceAudience.Private at the
@@ -451,23 +451,45 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static Cell
+createFirstOnRow(byte[] row)
+
+
+static Cell
+createFirstOnRow(byte[] row,
+byte[] family,
+byte[] col)
+
+
+static Cell
 createFirstOnRow(byte[] row,
 int roffset,
 short rlength)
 
-
+
+static Cell
+createFirstOnRow(byte[] row,
+int roffset,
+short rlength,
+byte[] family,
+int foffset,
+byte flength,
+byte[] col,
+int coffset,
+int clength)
+
+
 static Cell
 createFirstOnRow(Cell cell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 createFirstOnRowCol(Cell cell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 createFirstOnRowCol(Cell cell,
byte[] qArray,
@@ -477,85 +499,89 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
  passed qualifier.
 
 
-
+
 static Cell
 createFirstOnRowColTS(Cell cell,
  long ts)
 Creates the first cell with the row/family/qualifier of 
this cell and the given timestamp.
 
 
-
+
+static Cell
+createLastOnRow(byte[] row)
+
+
 static Cell
 createLastOnRow(Cell cell)
 Create a Cell that is larger than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 createLastOnRowCol(Cell cell)
 Create a Cell that is larger than all other possible Cells 
for the given Cell's rk:cf:q.
 
 
-
+
 static boolean
 equals(Cell a,
   Cell b)
 equals
 
 
-
+
 static boolean
 equalsIgnoreMvccVersion(Cell a,
Cell b)
 special case for Cell.equals
 
 
-
+
 static long
 estimatedHeapSizeOf(Cell cell)
 This is an estimate of the heap space occupied by a 
cell.
 
 
-
+
 static int
 estimatedSerializedSizeOf(Cell cell)
 Estimate based on keyvalue's serialization format.
 
 
-
+
 static int
 

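The new createFirstOnRow overloads above build the smallest Cell that can
exist for a row (optionally narrowed to a family and qualifier), which makes
them natural inclusive seek keys. A minimal usage sketch; FirstOnRowDemo and
the row/family/qualifier values are invented, the API calls are the ones
listed in the summary above:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class FirstOnRowDemo {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row-17");
    // Sorts before every real cell in "row-17", so it can serve as an
    // inclusive lower bound when seeking a scanner.
    Cell first = CellUtil.createFirstOnRow(row);
    // Narrower bound: first possible cell in row-17, family cf, qualifier q.
    Cell firstInCol = CellUtil.createFirstOnRow(
        row, Bytes.toBytes("cf"), Bytes.toBytes("q"));
    // The row-only bound should sort before the row+column bound.
    System.out.println(
        CellComparator.COMPARATOR.compare(first, firstInCol) < 0); // true
  }
}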
[29/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index b222809..1f34ddb 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -237,81 +237,81 @@
 229  /** Default value for ZooKeeper session 
timeout */
 230  public static final int 
DEFAULT_ZK_SESSION_TIMEOUT = 180 * 1000;
 231
-232  /** Configuration key for whether to 
use ZK.multi */
-233  public static final String 
ZOOKEEPER_USEMULTI = "hbase.zookeeper.useMulti";
+232  /** Parameter name for port region 
server listens on. */
+233  public static final String 
REGIONSERVER_PORT = "hbase.regionserver.port";
 234
-235  /** Parameter name for port region 
server listens on. */
-236  public static final String 
REGIONSERVER_PORT = "hbase.regionserver.port";
+235  /** Default port region server listens 
on. */
+236  public static final int 
DEFAULT_REGIONSERVER_PORT = 16020;
 237
-238  /** Default port region server listens 
on. */
-239  public static final int 
DEFAULT_REGIONSERVER_PORT = 16020;
+238  /** default port for region server web 
api */
+239  public static final int 
DEFAULT_REGIONSERVER_INFOPORT = 16030;
 240
-241  /** default port for region server web 
api */
-242  public static final int 
DEFAULT_REGIONSERVER_INFOPORT = 16030;
-243
-244  /** A configuration key for 
regionserver info port */
-245  public static final String 
REGIONSERVER_INFO_PORT =
-246"hbase.regionserver.info.port";
-247
-248  /** A flag that enables automatic 
selection of regionserver info port */
-249  public static final String 
REGIONSERVER_INFO_PORT_AUTO =
-250  REGIONSERVER_INFO_PORT + ".auto";
+241  /** A configuration key for 
regionserver info port */
+242  public static final String 
REGIONSERVER_INFO_PORT =
+243"hbase.regionserver.info.port";
+244
+245  /** A flag that enables automatic 
selection of regionserver info port */
+246  public static final String 
REGIONSERVER_INFO_PORT_AUTO =
+247  REGIONSERVER_INFO_PORT + ".auto";
+248
+249  /** Parameter name for what region 
server implementation to use. */
+250  public static final String 
REGION_SERVER_IMPL= "hbase.regionserver.impl";
 251
-252  /** Parameter name for what region 
server implementation to use. */
-253  public static final String 
REGION_SERVER_IMPL= "hbase.regionserver.impl";
+252  /** Parameter name for what master 
implementation to use. */
+253  public static final String MASTER_IMPL= 
"hbase.master.impl";
 254
-255  /** Parameter name for what master 
implementation to use. */
-256  public static final String MASTER_IMPL= 
"hbase.master.impl";
+255  /** Parameter name for what hbase 
client implementation to use. */
+256  public static final String 
HBASECLIENT_IMPL= "hbase.hbaseclient.impl";
 257
-258  /** Parameter name for what hbase 
client implementation to use. */
-259  public static final String 
HBASECLIENT_IMPL= "hbase.hbaseclient.impl";
+258  /** Parameter name for how often 
threads should wake up */
+259  public static final String 
THREAD_WAKE_FREQUENCY = "hbase.server.thread.wakefrequency";
 260
-261  /** Parameter name for how often 
threads should wake up */
-262  public static final String 
THREAD_WAKE_FREQUENCY = "hbase.server.thread.wakefrequency";
+261  /** Default value for thread wake 
frequency */
+262  public static final int 
DEFAULT_THREAD_WAKE_FREQUENCY = 10 * 1000;
 263
-264  /** Default value for thread wake 
frequency */
-265  public static final int 
DEFAULT_THREAD_WAKE_FREQUENCY = 10 * 1000;
+264  /** Parameter name for how often we 
should try to write a version file, before failing */
+265  public static final String 
VERSION_FILE_WRITE_ATTEMPTS = "hbase.server.versionfile.writeattempts";
 266
 267  /** Parameter name for how often we 
should try to write a version file, before failing */
-268  public static final String 
VERSION_FILE_WRITE_ATTEMPTS = "hbase.server.versionfile.writeattempts";
+268  public static final int 
DEFAULT_VERSION_FILE_WRITE_ATTEMPTS = 3;
 269
-270  /** Parameter name for how often we 
should try to write a version file, before failing */
-271  public static final int 
DEFAULT_VERSION_FILE_WRITE_ATTEMPTS = 3;
+270  /** Parameter name for how often a 
region should perform a major compaction */
+271  public static final String 
MAJOR_COMPACTION_PERIOD = "hbase.hregion.majorcompaction";
 272
-273  /** Parameter name for how often a 
region should perform a major compaction */
-274  public static final String 
MAJOR_COMPACTION_PERIOD = "hbase.hregion.majorcompaction";
-275
-276  /** Parameter name for the maximum 
batch of KVs to be used in flushes and compactions */
-277  public static final String 
COMPACTION_KV_MAX = "hbase.hstore.compaction.kv.max";

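The constants reordered above pair each configuration key with its default;
the usual consumption pattern is Configuration.getX(key, default). A small
sketch (PortLookup is invented; it assumes hbase-common on the classpath):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;

public class PortLookup {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Falls back to DEFAULT_REGIONSERVER_PORT (16020) when
    // hbase.regionserver.port is not set in hbase-site.xml.
    int port = conf.getInt(HConstants.REGIONSERVER_PORT,
        HConstants.DEFAULT_REGIONSERVER_PORT);
    System.out.println("region server RPC port: " + port);
  }
}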
[41/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 9ca4097..52ebf28 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -232,23 +232,45 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Cell
+CellUtil.createFirstOnRow(byte[] row)
+
+
+static Cell
+CellUtil.createFirstOnRow(byte[] row,
+byte[] family,
+byte[] col)
+
+
+static Cell
 CellUtil.createFirstOnRow(byte[] row,
 int roffset,
 short rlength)
 
 
 static Cell
+CellUtil.createFirstOnRow(byte[] row,
+int roffset,
+short rlength,
+byte[] family,
+int foffset,
+byte flength,
+byte[] col,
+int coffset,
+int clength)
+
+
+static Cell
 CellUtil.createFirstOnRow(Cell cell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 CellUtil.createFirstOnRowCol(Cell cell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 CellUtil.createFirstOnRowCol(Cell cell,
byte[] qArray,
@@ -258,13 +280,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  passed qualifier.
 
 
-
+
 static Cell
 CellUtil.createFirstOnRowColTS(Cell cell,
  long ts)
 Creates the first cell with the row/family/qualifier of 
this cell and the given timestamp.
 
 
+
+static Cell
+CellUtil.createLastOnRow(byte[] row)
+
 
 static Cell
 CellUtil.createLastOnRow(Cell cell)
@@ -968,17 +994,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Append
-Append.add(Cell cell)
-Add column and value to this Append operation.
-
-
-
 Increment
 Increment.add(Cell cell)
 Add the specified KeyValue to this operation.
 
 
+
+Append
+Append.add(Cell cell)
+Add column and value to this Append operation.
+
+
 
 Delete
 Delete.addDeleteMarker(Cell kv)
@@ -1060,8 +1086,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Put.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 
 
-Append
-Append.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
+Increment
+Increment.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 
 
 Mutation
@@ -1070,8 +1096,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Increment
-Increment.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
+Append
+Append.setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 
 
 Delete
@@ -1092,6 +1118,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+Cell
+ColumnPaginationFilter.getNextCellHint(Cell cell)
+
+
+Cell
+FilterList.getNextCellHint(Cell currentCell)
+
+
 abstract Cell
 Filter.getNextCellHint(Cell currentCell)
 If the filter returns the match code SEEK_NEXT_USING_HINT, 
then it should also tell which is
@@ -1100,23 +1134,25 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Cell
-ColumnPrefixFilter.getNextCellHint(Cell cell)
+MultipleColumnPrefixFilter.getNextCellHint(Cell cell)
 
 
 Cell
-MultipleColumnPrefixFilter.getNextCellHint(Cell cell)
+TimestampsFilter.getNextCellHint(Cell currentCell)
+Pick the next cell that the scanner should seek to.
+
 
 
 Cell
-FilterList.getNextCellHint(Cell currentCell)
+FuzzyRowFilter.getNextCellHint(Cell currentCell)
 
 
 Cell

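The getNextCellHint additions above are the seek side of the
SEEK_NEXT_USING_HINT contract described for Filter. A hypothetical custom
filter wiring the two together (SeekToRowFilter and its target row are
invented for illustration; the calls are the public Filter/CellUtil API):

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.util.Bytes;

public class SeekToRowFilter extends FilterBase {
  private final byte[] targetRow = Bytes.toBytes("row-9000");

  @Override
  public ReturnCode filterKeyValue(Cell c) throws IOException {
    if (CellUtil.matchingRow(c, targetRow)) {
      return ReturnCode.INCLUDE;
    }
    // Ask the scanner to consult getNextCellHint instead of stepping
    // cell by cell through rows we do not care about.
    return ReturnCode.SEEK_NEXT_USING_HINT;
  }

  @Override
  public Cell getNextCellHint(Cell currentCell) throws IOException {
    // Smallest possible cell in the target row: an inclusive seek key.
    return CellUtil.createFirstOnRow(targetRow);
  }
}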
[15/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/book.html
--
diff --git a/book.html b/book.html
index 1f99bce..d244150 100644
--- a/book.html
+++ b/book.html
@@ -1928,8 +1928,9 @@ For example:
 4.2. ZooKeeper Requirements
 
 ZooKeeper 3.4.x is required as of HBase 1.0.0.
-HBase makes use of the multi functionality that is only available 
since 3.4.0 (The useMulti configuration option defaults to 
true in HBase 1.0.0).
-See https://issues.apache.org/jira/browse/HBASE-12241;>HBASE-12241 
(The crash of regionServer when taking deadservers replication queue 
breaks replication) and https://issues.apache.org/jira/browse/HBASE-6775;>HBASE-6775 (Use 
ZK.multi when available for HBASE-6710 0.92/0.94 compatibility fix) for 
background.
+HBase makes use of the multi functionality that is only available 
since ZooKeeper 3.4.0. The hbase.zookeeper.useMulti configuration 
property defaults to true in HBase 1.0.0.
+Refer to https://issues.apache.org/jira/browse/HBASE-12241;>HBASE-12241 (The crash 
of regionServer when taking deadservers replication queue breaks 
replication) and https://issues.apache.org/jira/browse/HBASE-6775;>HBASE-6775 (Use 
ZK.multi when available for HBASE-6710 0.92/0.94 compatibility fix) for 
background.
+The property is deprecated and useMulti is always enabled in HBase 2.0.
 
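A sketch of how this (pre-2.0) property could be set programmatically; the
property name comes from the text above, the rest is generic Hadoop
Configuration usage and the class name is invented:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class UseMultiConfig {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Pre-2.0 only: HBase 2.0 drops the property and always uses ZK multi.
    conf.setBoolean("hbase.zookeeper.useMulti", true);
    System.out.println(conf.getBoolean("hbase.zookeeper.useMulti", false));
  }
}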
 
 
@@ -2221,21 +,6 @@ Configuration that it is thought rare anyone would 
change can exist only in code
 
 
 
-
-
-hbase.bulkload.staging.dir
-
-
-Description
-A staging directory in default file system (HDFS) for bulk loading.
-
-
-Default
-${hbase.fs.tmp.dir}
-
-
-
-
 
 
 hbase.cluster.distributed
@@ -2791,21 +2777,6 @@ Configuration that it is thought rare anyone would 
change can exist only in code
 
 
 
-
-
-hbase.zookeeper.useMulti
-
-
-Description
-Instructs HBase to make use of ZooKeeper's multi-update 
functionality. This allows certain ZooKeeper operations to complete more 
quickly and prevents some issues with rare Replication failure scenarios (see 
the release note of HBASE-2611 for an example). IMPORTANT: only set this to 
true if all ZooKeeper servers in the cluster are on version 3.4+ and will not 
be downgraded. ZooKeeper versions before 3.4 do not support multi-update and 
will not fail gracefully if multi-update is invoked (see ZOOKEEPER-1495).
-
-
-Default
-true
-
-
-
-
 
 
 hbase.zookeeper.property.initLimit
@@ -4218,7 +4189,7 @@ Configuration that it is thought rare anyone would change 
can exist only in code
 
 
 Description
-FS Permissions for the root directory in a secure (kerberos) setup. When 
master starts, it creates the rootdir with this permissions or sets the 
permissions if it does not match.
+FS Permissions for the root data subdirectory in a secure (kerberos) setup. 
When master starts, it creates the rootdir with these permissions, or sets 
the permissions if they do not match.
 
 
 Default
@@ -26440,9 +26411,7 @@ The following example limits the above example to 200 
MB/sec.
 
 137.9. 
Storing Snapshots in an Amazon S3 Bucket
 
-For general information and limitations of using Amazon S3 storage with 
HBase, see
-[amazon_s3_configuration]. You can also 
store and retrieve snapshots from Amazon
-S3, using the following procedure.
+You can store and retrieve snapshots from Amazon S3, using the following 
procedure.
 
 
 
@@ -30021,9 +29990,7 @@ In the example below we have ZooKeeper persist to 
/user/local/zookeeper
 
 What version of ZooKeeper should I use?
 
-The newer version, the better.
-For example, some folks have been bitten by https://issues.apache.org/jira/browse/ZOOKEEPER-1277;>ZOOKEEPER-1277.
-If running zookeeper 3.5+, you can ask hbase to make use of the new multi 
operation by enabling hbase.zookeeper.useMulti" in your 
hbase-site.xml.
+The newer version, the better. ZooKeeper 3.4.x is required as of HBase 
1.0.0.
 
 
 
@@ -34050,7 +34017,7 @@ The server will return cellblocks compressed using this 
same compressor as long
 
 
 Version 2.0.0-SNAPSHOT
-Last updated 2016-07-24 14:31:11 +00:00
+Last updated 2016-09-29 14:44:28 +00:00
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index 0a4e0ff..ea211c3 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Bulk Loads in Apache HBase (TM)
@@ -305,7 +305,7 @@ under the License. -->
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-09-16
+  Last Published: 
2016-09-29
 
 
 



[33/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/util/Counter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/Counter.html 
b/apidocs/org/apache/hadoop/hbase/util/Counter.html
index 67684f0..922a61e 100644
--- a/apidocs/org/apache/hadoop/hbase/util/Counter.html
+++ b/apidocs/org/apache/hadoop/hbase/util/Counter.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
+var methods = {"i0":42,"i1":42,"i2":42,"i3":42,"i4":42,"i5":42,"i6":42};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -108,10 +108,14 @@ var activeTableTab = "activeTableTab";
 
 
 
+Deprecated.
+use http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder instead.
+
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public class Counter
+ http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
+public class Counter
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 High scalable counter. Thread safe.
 
@@ -132,10 +136,14 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Constructor and Description
 
 
-Counter()
+Counter()
+Deprecated.
+
 
 
-Counter(long initValue)
+Counter(long initValue)
+Deprecated.
+
 
 
 
@@ -147,38 +155,52 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 Method Summary
 
-All MethodsInstance MethodsConcrete Methods
+All MethodsInstance MethodsConcrete MethodsDeprecated Methods
 
 Modifier and Type
 Method and Description
 
 
 void
-add(long delta)
+add(long delta)
+Deprecated.
+
 
 
 void
-decrement()
+decrement()
+Deprecated.
+
 
 
 void
-destroy()
+destroy()
+Deprecated.
+
 
 
 long
-get()
+get()
+Deprecated.
+
 
 
 void
-increment()
+increment()
+Deprecated.
+
 
 
 void
-set(long value)
+set(long value)
+Deprecated.
+
 
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-toString()
+toString()
+Deprecated.
+
 
 
 
@@ -208,7 +230,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 Counter
-public Counter()
+public Counter()
+Deprecated.
 
 
 
@@ -217,7 +240,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 Counter
-public Counter(long initValue)
+public Counter(long initValue)
+Deprecated.
 
 
 
@@ -234,7 +258,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 add
-public void add(long delta)
+public void add(long delta)
+Deprecated.
 
 
 
@@ -243,7 +268,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 increment
-public void increment()
+public void increment()
+Deprecated.
 
 
 
@@ -252,7 +278,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 decrement
-public void decrement()
+public void decrement()
+Deprecated.
 
 
 
@@ -261,7 +288,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 set
-public void set(long value)
+public void set(long value)
+Deprecated.
 
 
 
@@ -270,7 +298,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 get
-public long get()
+public long get()
+Deprecated.
 
 
 
@@ -279,7 +308,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 destroy
-public void destroy()
+public void destroy()
+Deprecated.
 
 
 
@@ -288,7 +318,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 toString
-public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String toString()
+public http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String toString()
+Deprecated.
 
 Overrides:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
 title="class or interface in java.lang">toStringin 
classhttp://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object

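Counter is deprecated above in favor of java.util.concurrent.atomic.LongAdder.
A minimal migration sketch (RequestMetrics and its method names are invented):

import java.util.concurrent.atomic.LongAdder;

public class RequestMetrics {
  // LongAdder plays the same role as the deprecated Counter: under
  // contention each thread updates its own cell and sum() folds them.
  private final LongAdder requests = new LongAdder();

  void onRequest() {
    requests.increment();   // was counter.increment()
  }

  long snapshot() {
    return requests.sum();  // was counter.get()
  }
}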
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/util/FastLongHistogram.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/FastLongHistogram.html 

[19/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
index 4dbb152..3d30c3b 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
@@ -32,26 +32,26 @@
 024import java.io.DataInput;
 025import java.io.DataOutput;
 026import java.io.IOException;
-027import java.math.BigDecimal;
-028import java.math.BigInteger;
-029import java.nio.ByteBuffer;
-030import java.nio.charset.Charset;
-031import java.security.SecureRandom;
-032import java.util.Arrays;
-033import java.util.Collection;
-034import java.util.Comparator;
-035import java.util.Iterator;
-036import java.util.List;
-037
-038import com.google.protobuf.ByteString;
+027import 
java.io.UnsupportedEncodingException;
+028import java.math.BigDecimal;
+029import java.math.BigInteger;
+030import java.nio.ByteBuffer;
+031import java.nio.charset.Charset;
+032import 
java.nio.charset.StandardCharsets;
+033import java.security.SecureRandom;
+034import java.util.Arrays;
+035import java.util.Collection;
+036import java.util.Comparator;
+037import java.util.Iterator;
+038import java.util.List;
 039
 040import org.apache.commons.logging.Log;
 041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-043import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-044import org.apache.hadoop.hbase.Cell;
-045import 
org.apache.hadoop.hbase.CellComparator;
-046import 
org.apache.hadoop.hbase.KeyValue;
+042import org.apache.hadoop.hbase.Cell;
+043import 
org.apache.hadoop.hbase.CellComparator;
+044import 
org.apache.hadoop.hbase.KeyValue;
+045import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+046import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 047import 
org.apache.hadoop.io.RawComparator;
 048import 
org.apache.hadoop.io.WritableComparator;
 049import 
org.apache.hadoop.io.WritableUtils;
@@ -60,2598 +60,2618 @@
 052
 053import 
com.google.common.annotations.VisibleForTesting;
 054import com.google.common.collect.Lists;
-055
-056/**
-057 * Utility class that handles byte 
arrays, conversions to/from other types,
-058 * comparisons, hash code generation, 
manufacturing keys for HashMaps or
-059 * HashSets, and can be used as key in 
maps or trees.
-060 */
-061@SuppressWarnings("restriction")
-062@InterfaceAudience.Public
-063@InterfaceStability.Stable
-064@edu.umd.cs.findbugs.annotations.SuppressWarnings(
-065
value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
-066justification="It has been like this 
forever")
-067public class Bytes implements 
Comparable<Bytes> {
-068  //HConstants.UTF8_ENCODING should be 
updated if this changed
-069  /** When we encode strings, we always 
specify UTF8 encoding */
-070  private static final String 
UTF8_ENCODING = "UTF-8";
-071
-072  //HConstants.UTF8_CHARSET should be 
updated if this changed
-073  /** When we encode strings, we always 
specify UTF8 encoding */
-074  private static final Charset 
UTF8_CHARSET = Charset.forName(UTF8_ENCODING);
-075
-076  //HConstants.EMPTY_BYTE_ARRAY should be 
updated if this changed
-077  private static final byte [] 
EMPTY_BYTE_ARRAY = new byte [0];
-078
-079  private static final Log LOG = 
LogFactory.getLog(Bytes.class);
+055import com.google.protobuf.ByteString;
+056
+057/**
+058 * Utility class that handles byte 
arrays, conversions to/from other types,
+059 * comparisons, hash code generation, 
manufacturing keys for HashMaps or
+060 * HashSets, and can be used as key in 
maps or trees.
+061 */
+062@SuppressWarnings("restriction")
+063@InterfaceAudience.Public
+064@InterfaceStability.Stable
+065@edu.umd.cs.findbugs.annotations.SuppressWarnings(
+066
value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
+067justification="It has been like this 
forever")
+068public class Bytes implements 
Comparable<Bytes> {
+069  //HConstants.UTF8_ENCODING should be 
updated if this changed
+070  /** When we encode strings, we always 
specify UTF8 encoding */
+071  private static final String 
UTF8_ENCODING = "UTF-8";
+072
+073  //HConstants.UTF8_CHARSET should be 
updated if this changed
+074  /** When we encode strings, we always 
specify UTF8 encoding */
+075  private static final Charset 
UTF8_CHARSET = Charset.forName(UTF8_ENCODING);
+076
+077  // Using the charset canonical name for 
String/byte[] conversions is much
+078  // more efficient due to use of cached 
encoders/decoders.
+079  private static final String UTF8_CSN = 
StandardCharsets.UTF_8.name();
 080
-081  /**
-082   * Size of boolean in bytes
-083   */
-084  public static final int 

[18/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.
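The UTF8_CSN constant introduced above exists because the JDK String
constructors and getBytes overloads that take a charset *name* reuse cached
per-thread encoders/decoders, while the Charset-object overloads allocate
fresh coders on each call. A self-contained sketch of the pattern (Utf8Convert
is invented; this is not the commit's exact code):

import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

public final class Utf8Convert {
  // Canonical name, i.e. "UTF-8"; the name-taking JDK overloads cache
  // their coders per thread, which is the efficiency win noted above.
  private static final String UTF8_CSN = StandardCharsets.UTF_8.name();

  public static String toStringUtf8(byte[] b, int off, int len) {
    try {
      return new String(b, off, len, UTF8_CSN);
    } catch (UnsupportedEncodingException e) {
      // UTF-8 support is mandated by the JVM spec; unreachable in practice.
      throw new IllegalArgumentException("UTF8 decoding is not supported", e);
    }
  }
}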

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
index 4dbb152..3d30c3b 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
@@ -32,26 +32,26 @@
 024import java.io.DataInput;
 025import java.io.DataOutput;
 026import java.io.IOException;
-027import java.math.BigDecimal;
-028import java.math.BigInteger;
-029import java.nio.ByteBuffer;
-030import java.nio.charset.Charset;
-031import java.security.SecureRandom;
-032import java.util.Arrays;
-033import java.util.Collection;
-034import java.util.Comparator;
-035import java.util.Iterator;
-036import java.util.List;
-037
-038import com.google.protobuf.ByteString;
+027import 
java.io.UnsupportedEncodingException;
+028import java.math.BigDecimal;
+029import java.math.BigInteger;
+030import java.nio.ByteBuffer;
+031import java.nio.charset.Charset;
+032import 
java.nio.charset.StandardCharsets;
+033import java.security.SecureRandom;
+034import java.util.Arrays;
+035import java.util.Collection;
+036import java.util.Comparator;
+037import java.util.Iterator;
+038import java.util.List;
 039
 040import org.apache.commons.logging.Log;
 041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-043import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-044import org.apache.hadoop.hbase.Cell;
-045import 
org.apache.hadoop.hbase.CellComparator;
-046import 
org.apache.hadoop.hbase.KeyValue;
+042import org.apache.hadoop.hbase.Cell;
+043import 
org.apache.hadoop.hbase.CellComparator;
+044import 
org.apache.hadoop.hbase.KeyValue;
+045import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+046import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 047import 
org.apache.hadoop.io.RawComparator;
 048import 
org.apache.hadoop.io.WritableComparator;
 049import 
org.apache.hadoop.io.WritableUtils;
@@ -60,2598 +60,2618 @@
 052
 053import 
com.google.common.annotations.VisibleForTesting;
 054import com.google.common.collect.Lists;
-055
-056/**
-057 * Utility class that handles byte 
arrays, conversions to/from other types,
-058 * comparisons, hash code generation, 
manufacturing keys for HashMaps or
-059 * HashSets, and can be used as key in 
maps or trees.
-060 */
-061@SuppressWarnings("restriction")
-062@InterfaceAudience.Public
-063@InterfaceStability.Stable
-064@edu.umd.cs.findbugs.annotations.SuppressWarnings(
-065
value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
-066justification="It has been like this 
forever")
-067public class Bytes implements 
Comparable<Bytes> {
-068  //HConstants.UTF8_ENCODING should be 
updated if this changed
-069  /** When we encode strings, we always 
specify UTF8 encoding */
-070  private static final String 
UTF8_ENCODING = "UTF-8";
-071
-072  //HConstants.UTF8_CHARSET should be 
updated if this changed
-073  /** When we encode strings, we always 
specify UTF8 encoding */
-074  private static final Charset 
UTF8_CHARSET = Charset.forName(UTF8_ENCODING);
-075
-076  //HConstants.EMPTY_BYTE_ARRAY should be 
updated if this changed
-077  private static final byte [] 
EMPTY_BYTE_ARRAY = new byte [0];
-078
-079  private static final Log LOG = 
LogFactory.getLog(Bytes.class);
+055import com.google.protobuf.ByteString;
+056
+057/**
+058 * Utility class that handles byte 
arrays, conversions to/from other types,
+059 * comparisons, hash code generation, 
manufacturing keys for HashMaps or
+060 * HashSets, and can be used as key in 
maps or trees.
+061 */
+062@SuppressWarnings("restriction")
+063@InterfaceAudience.Public
+064@InterfaceStability.Stable
+065@edu.umd.cs.findbugs.annotations.SuppressWarnings(
+066
value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
+067justification="It has been like this 
forever")
+068public class Bytes implements 
Comparable<Bytes> {
+069  //HConstants.UTF8_ENCODING should be 
updated if this changed
+070  /** When we encode strings, we always 
specify UTF8 encoding */
+071  private static final String 
UTF8_ENCODING = "UTF-8";
+072
+073  //HConstants.UTF8_CHARSET should be 
updated if this changed
+074  /** When we encode strings, we always 
specify UTF8 encoding */
+075  private static final Charset 
UTF8_CHARSET = Charset.forName(UTF8_ENCODING);
+076
+077  // Using the charset canonical name for 
String/byte[] conversions is much
+078  // more efficient due to use of cached 
encoders/decoders.
+079  private static final String UTF8_CSN = 
StandardCharsets.UTF_8.name();
 080
-081  /**
-082   * Size of boolean in bytes
-083   */
-084  public static final int 

[34/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/util/Bytes.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/Bytes.html 
b/apidocs/org/apache/hadoop/hbase/util/Bytes.html
index ea19094..8276a12 100644
--- a/apidocs/org/apache/hadoop/hbase/util/Bytes.html
+++ b/apidocs/org/apache/hadoop/hbase/util/Bytes.html
@@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Stable
-public class Bytes
+public class Bytes
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">Comparable<Bytes>
 Utility class that handles byte arrays, conversions to/from 
other types,
@@ -1199,7 +1199,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_BOOLEAN
-public static final int SIZEOF_BOOLEAN
+public static final int SIZEOF_BOOLEAN
 Size of boolean in bytes
 
 See Also:
@@ -1213,7 +1213,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_BYTE
-public static final int SIZEOF_BYTE
+public static final int SIZEOF_BYTE
 Size of byte in bytes
 
 See Also:
@@ -1227,7 +1227,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_CHAR
-public static final int SIZEOF_CHAR
+public static final int SIZEOF_CHAR
 Size of char in bytes
 
 See Also:
@@ -1241,7 +1241,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_DOUBLE
-public static final int SIZEOF_DOUBLE
+public static final int SIZEOF_DOUBLE
 Size of double in bytes
 
 See Also:
@@ -1255,7 +1255,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_FLOAT
-public static final int SIZEOF_FLOAT
+public static final int SIZEOF_FLOAT
 Size of float in bytes
 
 See Also:
@@ -1269,7 +1269,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_INT
-public static final int SIZEOF_INT
+public static final int SIZEOF_INT
 Size of int in bytes
 
 See Also:
@@ -1283,7 +1283,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_LONG
-public static final int SIZEOF_LONG
+public static final int SIZEOF_LONG
 Size of long in bytes
 
 See Also:
@@ -1297,7 +1297,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 SIZEOF_SHORT
-public static final int SIZEOF_SHORT
+public static final int SIZEOF_SHORT
 Size of short in bytes
 
 See Also:
@@ -1311,7 +1311,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 MASK_FOR_LOWER_INT_IN_LONG
-public static final long MASK_FOR_LOWER_INT_IN_LONG
+public static final long MASK_FOR_LOWER_INT_IN_LONG
 Mask to apply to a long to reveal the lower int only. Use 
like this:
  int i = (int)(0xFFFFFFFF00000000L ^ some_long_value);
 
@@ -1326,7 +1326,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 ESTIMATED_HEAP_TAX
-public static final int ESTIMATED_HEAP_TAX
+public static final int ESTIMATED_HEAP_TAX
 Estimate of size cost to pay beyond payload in jvm for 
instance of byte [].
  Estimate based on study of jhat and jprofiler numbers.
 
@@ -1341,7 +1341,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 BYTES_COMPARATOR
-public static final http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">Comparator<byte[]> BYTES_COMPARATOR
+public static final http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">Comparator<byte[]> BYTES_COMPARATOR
 Pass this to TreeMaps where byte [] are keys.
 
 
@@ -1351,7 +1351,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 BYTES_RAWCOMPARATOR
-public static final org.apache.hadoop.io.RawComparator<byte[]> 
BYTES_RAWCOMPARATOR
+public static final org.apache.hadoop.io.RawComparator<byte[]> 
BYTES_RAWCOMPARATOR
 Use comparing byte arrays, byte-by-byte
 
 
@@ -1369,7 +1369,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Bytes
-public Bytes()
+public Bytes()
 Create a zero-size sequence.
 
 
@@ -1379,7 +1379,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Bytes
-public Bytes(byte[] bytes)
+public Bytes(byte[] bytes)
 Create a Bytes using the byte array as the initial 
value.
 
 Parameters:
@@ -1393,7 +1393,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Bytes
-public Bytes(Bytes ibw)
+public Bytes(Bytes ibw)
 Set the new Bytes to the contents of the passed
  ibw.
 
@@ -1408,7 +1408,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 

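Assuming MASK_FOR_LOWER_INT_IN_LONG is the upper-32-bit pattern
0xFFFFFFFF00000000L (an assumption based on the "lower int only" Javadoc
above), the XOR-then-cast idiom works because the cast discards the flipped
high bits. A standalone demo (LowerIntDemo is invented):

public class LowerIntDemo {
  public static void main(String[] args) {
    long some_long_value = 0x1122334455667788L;
    // XOR flips only the upper 32 bits; the (int) cast drops them,
    // leaving exactly the lower int of the original long.
    int i = (int) (0xFFFFFFFF00000000L ^ some_long_value);
    System.out.printf("%08x%n", i);   // prints 55667788
  }
}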
[26/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
index da95361..72920ba 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Result.html
@@ -283,666 +283,678 @@
 275return result;
 276  }
 277
-278  protected int binarySearch(final Cell 
[] kvs,
-279 final byte 
[] family,
-280 final byte 
[] qualifier) {
-281Cell searchTerm =
-282
KeyValueUtil.createFirstOnRow(CellUtil.cloneRow(kvs[0]),
-283family, qualifier);
-284
-285// pos === ( -(insertion point) - 
1)
-286int pos = Arrays.binarySearch(kvs, 
searchTerm, CellComparator.COMPARATOR);
-287// never will exact match
-288    if (pos < 0) {
-289  pos = (pos+1) * -1;
-290  // pos is now insertion point
-291}
-292if (pos == kvs.length) {
-293  return -1; // doesn't exist
-294}
-295return pos;
-296  }
-297
-298  /**
-299   * Searches for the latest value for 
the specified column.
-300   *
-301   * @param kvs the array to search
-302   * @param family family name
-303   * @param foffset family offset
-304   * @param flength family length
-305   * @param qualifier column qualifier
-306   * @param qoffset qualifier offset
-307   * @param qlength qualifier length
-308   *
-309   * @return the index where the value 
was found, or -1 otherwise
-310   */
-311  protected int binarySearch(final Cell 
[] kvs,
-312  final byte [] family, final int 
foffset, final int flength,
-313  final byte [] qualifier, final int 
qoffset, final int qlength) {
-314
-315double keyValueSize = (double)
-316
KeyValue.getKeyValueDataStructureSize(kvs[0].getRowLength(), flength, qlength, 
0);
-317
-318byte[] buffer = localBuffer.get();
-319    if (buffer == null || keyValueSize > buffer.length) {
-320  // pad to the smallest multiple of 
the pad width
-321  buffer = new byte[(int) 
Math.ceil(keyValueSize / PAD_WIDTH) * PAD_WIDTH];
-322  localBuffer.set(buffer);
-323}
-324
-325Cell searchTerm = 
KeyValueUtil.createFirstOnRow(buffer, 0,
-326kvs[0].getRowArray(), 
kvs[0].getRowOffset(), kvs[0].getRowLength(),
-327family, foffset, flength,
-328qualifier, qoffset, qlength);
+278  private byte[] notNullBytes(final 
byte[] bytes) {
+279if (bytes == null) {
+280  return 
HConstants.EMPTY_BYTE_ARRAY;
+281} else {
+282  return bytes;
+283}
+284  }
+285
+286  protected int binarySearch(final Cell 
[] kvs,
+287 final byte 
[] family,
+288 final byte 
[] qualifier) {
+289byte[] familyNotNull = 
notNullBytes(family);
+290byte[] qualifierNotNull = 
notNullBytes(qualifier);
+291Cell searchTerm =
+292
CellUtil.createFirstOnRow(kvs[0].getRowArray(),
+293kvs[0].getRowOffset(), 
kvs[0].getRowLength(),
+294familyNotNull, 0, 
(byte)familyNotNull.length,
+295qualifierNotNull, 0, 
qualifierNotNull.length);
+296
+297// pos === ( -(insertion point) - 
1)
+298int pos = Arrays.binarySearch(kvs, 
searchTerm, CellComparator.COMPARATOR);
+299// never will exact match
+300    if (pos < 0) {
+301  pos = (pos+1) * -1;
+302  // pos is now insertion point
+303}
+304if (pos == kvs.length) {
+305  return -1; // doesn't exist
+306}
+307return pos;
+308  }
+309
+310  /**
+311   * Searches for the latest value for 
the specified column.
+312   *
+313   * @param kvs the array to search
+314   * @param family family name
+315   * @param foffset family offset
+316   * @param flength family length
+317   * @param qualifier column qualifier
+318   * @param qoffset qualifier offset
+319   * @param qlength qualifier length
+320   *
+321   * @return the index where the value 
was found, or -1 otherwise
+322   */
+323  protected int binarySearch(final Cell 
[] kvs,
+324  final byte [] family, final int 
foffset, final int flength,
+325  final byte [] qualifier, final int 
qoffset, final int qlength) {
+326
+327double keyValueSize = (double)
+328
KeyValue.getKeyValueDataStructureSize(kvs[0].getRowLength(), flength, qlength, 
0);
 329
-330// pos === ( -(insertion point) - 
1)
-331int pos = Arrays.binarySearch(kvs, 
searchTerm, CellComparator.COMPARATOR);
-332// never will exact match
-333    if (pos < 0) {
-334  pos = (pos+1) * -1;
-335  // pos is now insertion point
-336}
-337if (pos == kvs.length) {
-338  return -1; // doesn't exist
-339}
-340return pos;
-341  }
-342
-343  /**
-344   * The Cell for the most recent 
timestamp for a given column.
-345   *
-346   * @param family

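The "pos === ( -(insertion point) - 1)" comment above is the
java.util.Arrays.binarySearch contract; the decode step is worth seeing in
isolation. A standalone JDK demo (InsertionPointDemo is invented, not HBase
code):

import java.util.Arrays;

public class InsertionPointDemo {
  public static void main(String[] args) {
    int[] sorted = {10, 20, 40};
    int pos = Arrays.binarySearch(sorted, 30);  // miss: -(2) - 1 = -3
    if (pos < 0) {
      pos = (pos + 1) * -1;  // same decode as Result.binarySearch above
    }
    System.out.println(pos); // 2: index of the first element >= 30
  }
}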
[03/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.html 
b/devapidocs/org/apache/hadoop/hbase/HConstants.html
index e6652c7..bdd073e 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.html
@@ -190,24 +190,30 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+BULKLOAD_STAGING_DIR_NAME
+Staging dir used by bulk load
+
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 BYTES_PER_CHECKSUM
 The name of the configuration parameter that specifies
  the number of bytes in a newly created checksum chunk.
 
 
-
+
 static byte[]
 CATALOG_FAMILY
 The catalog family
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CATALOG_FAMILY_STR
 The catalog family as a string
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CHECKSUM_TYPE_NAME
 The name of the configuration parameter that specifies
@@ -215,668 +221,668 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
  for newly created blocks.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CIPHER_AES
 Default cipher for encryption
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLIENT_PORT_STR
 The ZK client port key in the ZK properties map.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLUSTER_DISTRIBUTED
 Cluster is in distributed mode or not
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLUSTER_ID_DEFAULT
 Default value for cluster ID
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CLUSTER_ID_FILE_NAME
 name of the file for unique cluster ID
 
 
-
+
 static boolean
 CLUSTER_IS_DISTRIBUTED
 Cluster is fully-distributed
 
 
-
+
 static boolean
 CLUSTER_IS_LOCAL
 Cluster is standalone or pseudo-distributed
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 COMPACTION_KV_MAX
 Parameter name for the maximum batch of KVs to be used in 
flushes and compactions
 
 
-
+
 static int
 COMPACTION_KV_MAX_DEFAULT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CONFIGURATION
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CORRUPT_DIR_NAME
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_INCLUSION_KEY
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_KEY_PATTERN
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_VALUE_PARAM_PATTERN
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_VALUE_PATTERN
 
  Pattern that matches a coprocessor specification.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY
 Configuration key for the name of the alternate cipher 
algorithm for the cluster, a string
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 

[20/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferUtils.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferUtils.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferUtils.html
index 006f111..e954828 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferUtils.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/ByteBufferUtils.html
@@ -25,1013 +25,1022 @@
 017package org.apache.hadoop.hbase.util;
 018
 019import java.io.ByteArrayOutputStream;
-020import java.io.DataInputStream;
-021import java.io.IOException;
-022import java.io.InputStream;
-023import java.io.OutputStream;
-024import java.math.BigDecimal;
-025import java.math.BigInteger;
-026import java.nio.ByteBuffer;
-027import java.util.Arrays;
-028
-029import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-030import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-031import 
org.apache.hadoop.hbase.io.ByteBufferSupportOutputStream;
-032import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-033import org.apache.hadoop.io.IOUtils;
-034import 
org.apache.hadoop.io.WritableUtils;
-035
-036import sun.nio.ch.DirectBuffer;
-037
-038/**
-039 * Utility functions for working with 
byte buffers, such as reading/writing
-040 * variable-length long numbers.
-041 */
-042@SuppressWarnings("restriction")
-043@InterfaceAudience.Public
-044@InterfaceStability.Evolving
-045public final class ByteBufferUtils {
-046
-047  // "Compressed integer" serialization 
helper constants.
-048  public final static int VALUE_MASK = 
0x7f;
-049  public final static int NEXT_BIT_SHIFT 
= 7;
-050  public final static int NEXT_BIT_MASK = 1 << 7;
-051  private static final boolean 
UNSAFE_AVAIL = UnsafeAvailChecker.isAvailable();
-052  private static final boolean 
UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
-053
-054  private ByteBufferUtils() {
-055  }
-056
-057  /**
-058   * Similar to {@link WritableUtils#writeVLong(java.io.DataOutput, long)},
-059   * but writes to a {@link ByteBuffer}.
-060   */
-061  public static void writeVLong(ByteBuffer out, long i) {
-062    if (i >= -112 && i <= 127) {
-063      out.put((byte) i);
-064      return;
-065    }
-066
-067    int len = -112;
-068    if (i < 0) {
-069      i ^= -1L; // take one's complement
-070      len = -120;
-071    }
-072
-073    long tmp = i;
-074    while (tmp != 0) {
-075      tmp = tmp >> 8;
-076      len--;
-077    }
-078
-079    out.put((byte) len);
-080
-081    len = (len < -120) ? -(len + 120) : -(len + 112);
-082
-083    for (int idx = len; idx != 0; idx--) {
-084      int shiftbits = (idx - 1) * 8;
-085      long mask = 0xFFL << shiftbits;
-086      out.put((byte) ((i & mask) >> shiftbits));
-087    }
-088  }
-089
-090  /**
-091   * Similar to {@link WritableUtils#readVLong(DataInput)} but reads from a
-092   * {@link ByteBuffer}.
-093   */
-094  public static long readVLong(ByteBuffer in) {
-095    byte firstByte = in.get();
-096    int len = WritableUtils.decodeVIntSize(firstByte);
-097    if (len == 1) {
-098      return firstByte;
-099    }
-100    long i = 0;
-101    for (int idx = 0; idx < len - 1; idx++) {
-102      byte b = in.get();
-103      i = i << 8;
-104      i = i | (b & 0xFF);
-105    }
-106    return (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1L) : i);
-107  }
-108
+020import java.io.DataInput;
+021import java.io.DataInputStream;
+022import java.io.IOException;
+023import java.io.InputStream;
+024import java.io.OutputStream;
+025import java.math.BigDecimal;
+026import java.math.BigInteger;
+027import java.nio.ByteBuffer;
+028import java.util.Arrays;
+029
+030import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+031import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+032import 
org.apache.hadoop.hbase.io.ByteBufferSupportOutputStream;
+033import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+034import org.apache.hadoop.io.IOUtils;
+035import 
org.apache.hadoop.io.WritableUtils;
+036
+037import sun.nio.ch.DirectBuffer;
+038
+039/**
+040 * Utility functions for working with 
byte buffers, such as reading/writing
+041 * variable-length long numbers.
+042 */
+043@SuppressWarnings("restriction")
+044@InterfaceAudience.Public
+045@InterfaceStability.Evolving
+046public final class ByteBufferUtils {
+047
+048  // "Compressed integer" serialization 
helper constants.
+049  public final static int VALUE_MASK = 
0x7f;
+050  public final static int NEXT_BIT_SHIFT 
= 7;
+051  public final static int NEXT_BIT_MASK = 1 << 7;
+052  private static final boolean 
UNSAFE_AVAIL = UnsafeAvailChecker.isAvailable();
+053  private static final boolean 
UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
+054
+055  private ByteBufferUtils() {
+056  }
+057
+058  /**
+059   * Similar to {@link 
WritableUtils#writeVLong(java.io.DataOutput, long)},
+060   * but writes to a {@link 

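The writeVLong/readVLong pair above mirrors Hadoop's WritableUtils
variable-length long encoding on a ByteBuffer. A round trip through the
stream-based originals shows the space saving (VLongRoundTrip is invented;
it assumes hadoop-common on the classpath):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.WritableUtils;

public class VLongRoundTrip {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    WritableUtils.writeVLong(new DataOutputStream(bos), 300L);
    byte[] encoded = bos.toByteArray();   // 3 bytes instead of a fixed 8
    long decoded = WritableUtils.readVLong(
        new DataInputStream(new ByteArrayInputStream(encoded)));
    System.out.println(encoded.length + " bytes -> " + decoded); // 3 bytes -> 300
  }
}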
[42/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index a4ba957..a86c3ae 100644
--- a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
@@ -263,7 +263,7 @@ the order they are declared.
 
 
 values
-public static KeepDeletedCells[] values()
+public static KeepDeletedCells[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -283,7 +283,7 @@ for (KeepDeletedCells c : KeepDeletedCells.values())
 
 
 valueOf
-public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String name)
+public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/UnknownScannerException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/UnknownScannerException.html 
b/apidocs/org/apache/hadoop/hbase/UnknownScannerException.html
index 4a34db0..b3b2829 100644
--- a/apidocs/org/apache/hadoop/hbase/UnknownScannerException.html
+++ b/apidocs/org/apache/hadoop/hbase/UnknownScannerException.html
@@ -171,6 +171,10 @@ extends Constructor
 
 
+
+UnknownScannerException(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String s,
+   http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception e)
+
 
 
 
@@ -221,7 +225,7 @@ extends 
 
 
-
+
 
 UnknownScannerException
 public UnknownScannerException(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String s)
@@ -232,6 +236,16 @@ extends 
 
 
+
+
+
+
+
+UnknownScannerException
+public UnknownScannerException(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String s,
+   http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception e)
+
+
 
 
 


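The new two-argument constructor above keeps the underlying failure as the
cause instead of flattening it into the message. A hypothetical usage sketch
(ScannerLookup and its parameters are invented):

import org.apache.hadoop.hbase.UnknownScannerException;

public class ScannerLookup {
  void failUnknown(long scannerId, Exception cause)
      throws UnknownScannerException {
    // The Exception overload preserves the original stack trace.
    throw new UnknownScannerException("Unknown scanner id=" + scannerId, cause);
  }
}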

[13/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 68ccf4a..39d2b20 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2016 The Apache Software Foundation
 
-  File: 1845,
- Errors: 11576,
+  File: 1850,
+ Errors: 11585,
  Warnings: 0,
  Infos: 0
   
@@ -125,7 +125,7 @@ under the License.
   0
 
 
-  55
+  54
 
   
   
@@ -480,7 +480,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.MasterKeepAliveConnection.java;>org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.snapshot.HBaseSnapshotException.java;>org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
 
 
   0
@@ -494,7 +494,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.snapshot.HBaseSnapshotException.java;>org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.MasterKeepAliveConnection.java;>org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
 
 
   0
@@ -755,7 +755,7 @@ under the License.
   0
 
 
-  5
+  4
 
   
   
@@ -1026,7 +1026,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.TableSnapshotScanner.java;>org/apache/hadoop/hbase/client/TableSnapshotScanner.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.wal.FSHLogProvider.java;>org/apache/hadoop/hbase/wal/FSHLogProvider.java
 
 
   0
@@ -1035,12 +1035,12 @@ under the License.
   0
 
 
-  4
+  0
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.wal.FSHLogProvider.java;>org/apache/hadoop/hbase/wal/FSHLogProvider.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.TableSnapshotScanner.java;>org/apache/hadoop/hbase/client/TableSnapshotScanner.java
 
 
   0
@@ -1049,7 +1049,7 @@ under the License.
   0
 
 
-  0
+  4
 
   
   
@@ -1567,7 +1567,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -2505,7 +2505,7 @@ under the License.
   0
 
 
-  203
+  211
 
   
   
@@ -2636,7 +2636,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.CompactionPipeline.java;>org/apache/hadoop/hbase/regionserver/CompactionPipeline.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.mapreduce.PutSortReducer.java;>org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
 
 
   0
@@ -2645,12 +2645,12 @@ under the License.
   0
 
 
-  1
+  2
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.mapreduce.PutSortReducer.java;>org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.CompactionPipeline.java;>org/apache/hadoop/hbase/regionserver/CompactionPipeline.java
 
 
   0
@@ -2659,7 +2659,7 @@ under the License.
   0
 
 
-  2
+  1
 
   

[31/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/util/package-summary.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/package-summary.html 
b/apidocs/org/apache/hadoop/hbase/util/package-summary.html
index 992912b..d27ecac 100644
--- a/apidocs/org/apache/hadoop/hbase/util/package-summary.html
+++ b/apidocs/org/apache/hadoop/hbase/util/package-summary.html
@@ -171,8 +171,8 @@
 
 
 Counter
-
-High scalable counter.
+Deprecated
+use java.util.concurrent.atomic.LongAdder instead.
 
 
 
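For readers hitting this deprecation: a minimal migration sketch, assuming code that previously used org.apache.hadoop.hbase.util.Counter for hot-path counting. The RequestMetrics class below is illustrative, not from the site diff.

import java.util.concurrent.atomic.LongAdder;

public class RequestMetrics {
  private final LongAdder requests = new LongAdder();

  public void onRequest() {
    requests.increment(); // like Counter.increment(); contention-friendly under many threads
  }

  public long snapshot() {
    return requests.sum(); // like Counter.get(); the sum may be slightly stale under concurrent updates
  }
}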

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/overview-frame.html
--
diff --git a/apidocs/overview-frame.html b/apidocs/overview-frame.html
index bd42c17..8f2ef06 100644
--- a/apidocs/overview-frame.html
+++ b/apidocs/overview-frame.html
@@ -41,6 +41,7 @@
 org.apache.hadoop.hbase.io.hfile
 org.apache.hadoop.hbase.io.util
 org.apache.hadoop.hbase.ipc
+org.apache.hadoop.hbase.jetty
 org.apache.hadoop.hbase.mapred
 org.apache.hadoop.hbase.mapreduce
 org.apache.hadoop.hbase.master

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/overview-summary.html
--
diff --git a/apidocs/overview-summary.html b/apidocs/overview-summary.html
index c94078e..30656ad 100644
--- a/apidocs/overview-summary.html
+++ b/apidocs/overview-summary.html
@@ -212,100 +212,104 @@
 
 
 
+org.apache.hadoop.hbase.jetty
+
+
+
 org.apache.hadoop.hbase.mapred
 
 Provides HBase MapReduce (http://wiki.apache.org/hadoop/HadoopMapReduce)
 Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
-
+
 org.apache.hadoop.hbase.mapreduce
 
 Provides HBase MapReduce (http://wiki.apache.org/hadoop/HadoopMapReduce)
 Input/OutputFormats, a table indexing MapReduce job, and utility methods.
 
 
-
+
 org.apache.hadoop.hbase.master
 
 
-
+
 org.apache.hadoop.hbase.mob
 
 
-
+
 org.apache.hadoop.hbase.mob.compactions
 
 
-
+
 org.apache.hadoop.hbase.mob.mapreduce
 
 
-
+
 org.apache.hadoop.hbase.namespace
 
 
-
+
 org.apache.hadoop.hbase.nio
 
 
-
+
 org.apache.hadoop.hbase.quotas
 
 
-
+
 org.apache.hadoop.hbase.regionserver
 
 
-
+
 org.apache.hadoop.hbase.regionserver.querymatcher
 
 
-
+
 org.apache.hadoop.hbase.regionserver.throttle
 
 
-
+
 org.apache.hadoop.hbase.regionserver.wal
 
 
-
+
 org.apache.hadoop.hbase.replication
 
 Multi Cluster Replication
 
 
-
+
 org.apache.hadoop.hbase.rest
 
 HBase REST
 
 
-
+
 org.apache.hadoop.hbase.rest.client
 
 
-
+
 org.apache.hadoop.hbase.rsgroup
 
 
-
+
 org.apache.hadoop.hbase.security
 
 
-
+
 org.apache.hadoop.hbase.snapshot
 
 
-
+
 org.apache.hadoop.hbase.spark
 
 
-
+
 org.apache.hadoop.hbase.spark.example.hbasecontext
 
 
-
+
 org.apache.hadoop.hbase.types
 
 
@@ -313,23 +317,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  extensible data type API.
 
 
-
+
 org.apache.hadoop.hbase.util
 
 
-
+
 org.apache.hadoop.hbase.util.hbck
 
 
-
+
 org.apache.hadoop.hbase.wal
 
 
-
+
 org.apache.hadoop.hbase.zookeeper
 
 
-
+
 org.apache.hbase.archetypes.exemplars.client
 
 This package provides fully-functional exemplar Java code 
demonstrating
@@ -337,7 +341,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  archetype with hbase-client dependency.
 
 
-
+
 org.apache.hbase.archetypes.exemplars.shaded_client
 
 This package provides fully-functional exemplar Java code 
demonstrating

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/overview-tree.html
--
diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html
index 27872c5..b4b03d2 100644
--- a/apidocs/overview-tree.html
+++ b/apidocs/overview-tree.html
@@ -101,6 +101,7 @@
 org.apache.hadoop.hbase.io.hfile,
 
 org.apache.hadoop.hbase.io.util,
 
 org.apache.hadoop.hbase.ipc,
 
+org.apache.hadoop.hbase.jetty,
 
 org.apache.hadoop.hbase.mapred,
 
 org.apache.hadoop.hbase.mapreduce,
 
 org.apache.hadoop.hbase.master,
 
@@ -586,6 +587,7 @@
 org.apache.hadoop.hbase.quotas.ThrottlingException
 
 
+org.apache.hadoop.hbase.exceptions.ScannerResetException
 org.apache.hadoop.hbase.client.ScannerTimeoutException
 org.apache.hadoop.hbase.ipc.ServerTooBusyException
 org.apache.hadoop.hbase.TableExistsException
@@ -848,23 +850,23 @@
 org.apache.hadoop.hbase.KeepDeletedCells
 org.apache.hadoop.hbase.ProcedureState
 org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
+org.apache.hadoop.hbase.filter.FilterList.Operator
+org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
 org.apache.hadoop.hbase.filter.Filter.ReturnCode
 org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp

[23/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
index 480107e..070e398 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.html
@@ -33,120 +33,120 @@
 025import org.apache.hadoop.hbase.Cell;
 026import org.apache.hadoop.hbase.CellUtil;
 027import org.apache.hadoop.hbase.HConstants;
-028import org.apache.hadoop.hbase.KeyValueUtil;
-029import org.apache.hadoop.hbase.classification.InterfaceAudience;
-030import org.apache.hadoop.hbase.classification.InterfaceStability;
-031import org.apache.hadoop.hbase.exceptions.DeserializationException;
-032import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-033import org.apache.hadoop.hbase.util.ByteStringer;
-034import org.apache.hadoop.hbase.util.Bytes;
-035
-036import com.google.protobuf.InvalidProtocolBufferException;
-037
-038/**
-039 * Filter to support scan multiple row key ranges. It can construct the row key ranges from the
-040 * passed list which can be accessed by each region server.
-041 *
-042 * HBase is quite efficient when scanning only one small row key range. If user needs to specify
-043 * multiple row key ranges in one scan, the typical solutions are: 1. through FilterList which is a
-044 * list of row key Filters, 2. using the SQL layer over HBase to join with two table, such as hive,
-045 * phoenix etc. However, both solutions are inefficient. Both of them can't utilize the range info
-046 * to perform fast forwarding during scan which is quite time consuming. If the number of ranges
-047 * are quite big (e.g. millions), join is a proper solution though it is slow. However, there are
-048 * cases that user wants to specify a small number of ranges to scan (e.g. <1000 ranges). Both
-049 * solutions can't provide satisfactory performance in such case. MultiRowRangeFilter is to support
-050 * such use case (scan multiple row key ranges), which can construct the row key ranges from user
-051 * specified list and perform fast-forwarding during scan. Thus, the scan will be quite efficient.
-052 */
-053@InterfaceAudience.Public
-054@InterfaceStability.Evolving
-055public class MultiRowRangeFilter extends FilterBase {
-056
-057  private List<RowRange> rangeList;
-058
-059  private static final int ROW_BEFORE_FIRST_RANGE = -1;
-060  private boolean EXCLUSIVE = false;
-061  private boolean done = false;
-062  private boolean initialized = false;
-063  private int index;
-064  private RowRange range;
-065  private ReturnCode currentReturnCode;
-066
-067  /**
-068   * @param list A list of <code>RowRange</code>
-069   * @throws java.io.IOException
-070   *   throw an exception if the range list is not in a natural order or any
-071   *   <code>RowRange</code> is invalid
-072   */
-073  public MultiRowRangeFilter(List<RowRange> list) throws IOException {
-074    this.rangeList = sortAndMerge(list);
-075  }
-076
-077  @Override
-078  public boolean filterAllRemaining() {
-079    return done;
-080  }
-081
-082  public List<RowRange> getRowRanges() {
-083    return this.rangeList;
-084  }
-085
-086  @Override
-087  public boolean filterRowKey(Cell firstRowCell) {
-088    if (filterAllRemaining()) return true;
-089    // If it is the first time of running, calculate the current range index for
-090    // the row key. If index is out of bound which happens when the start row
-091    // user sets is after the largest stop row of the ranges, stop the scan.
-092    // If row key is after the current range, find the next range and update index.
-093    byte[] rowArr = firstRowCell.getRowArray();
-094    int length = firstRowCell.getRowLength();
-095    int offset = firstRowCell.getRowOffset();
-096    if (!initialized
-097        || !range.contains(rowArr, offset, length)) {
-098      byte[] rowkey = CellUtil.cloneRow(firstRowCell);
-099      index = getNextRangeIndex(rowkey);
-100      if (index >= rangeList.size()) {
-101        done = true;
-102        currentReturnCode = ReturnCode.NEXT_ROW;
-103        return false;
-104      }
-105      if (index != ROW_BEFORE_FIRST_RANGE) {
-106        range = rangeList.get(index);
-107      } else {
-108        range = rangeList.get(0);
-109      }
-110      if (EXCLUSIVE) {
-111        EXCLUSIVE = false;
-112        currentReturnCode = ReturnCode.NEXT_ROW;
-113        return false;
-114      }
-115      if (!initialized) {
-116        if (index != ROW_BEFORE_FIRST_RANGE) {
-117          currentReturnCode = ReturnCode.INCLUDE;
-118        } else {
-119          currentReturnCode = ReturnCode.SEEK_NEXT_USING_HINT;
-120        }
-121

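Since the hunk above only shows the removed rendering of the filter's source, here is a minimal client-side usage sketch of MultiRowRangeFilter, assuming an open Connection named conn and a table "t1"; the table name and row-key ranges are illustrative, not taken from the diff.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiRangeScanExample {
  static void scanRanges(Connection conn) throws Exception {
    // Two disjoint row-key ranges; the filter sorts and merges them, then
    // fast-forwards between ranges via SEEK_NEXT_USING_HINT during the scan.
    List<RowRange> ranges = Arrays.asList(
        new RowRange(Bytes.toBytes("row10"), true, Bytes.toBytes("row20"), false),
        new RowRange(Bytes.toBytes("row60"), true, Bytes.toBytes("row70"), false));
    Scan scan = new Scan();
    scan.setFilter(new MultiRowRangeFilter(ranges));
    try (Table table = conn.getTable(TableName.valueOf("t1"));
         ResultScanner scanner = table.getScanner(scan)) {
      for (Result r : scanner) {
        System.out.println(Bytes.toString(r.getRow()));
      }
    }
  }
}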
[35/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html 
b/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
index 99a1e7d..5c7a2b6 100644
--- a/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
+++ b/apidocs/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  @InterfaceStability.Evolving
-public class ReplicationPeerConfig
+public class ReplicationPeerConfig
 extends Object
 A configuration for the replication peer cluster.
 
@@ -158,35 +158,43 @@ extends Object
 getConfiguration()

+Set<String>
+getNamespaces()
+
+
 Map<byte[],byte[]>
 getPeerData()

-
+
 String
 getReplicationEndpointImpl()

-
+
 Map<TableName,List<String>>
 getTableCFsMap()

-
+
 ReplicationPeerConfig
 setClusterKey(String clusterKey)
 Set the clusterKey which is the concatenation of the slave cluster's:
   hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent


-
+
+ReplicationPeerConfig
+setNamespaces(Set<String> namespaces)
+
+
 ReplicationPeerConfig
 setReplicationEndpointImpl(String replicationEndpointImpl)
 Sets the ReplicationEndpoint plugin class for this peer.


-
+
 ReplicationPeerConfig
 setTableCFsMap(Map<TableName,? extends Collection<String>> tableCFsMap)

-
+
 String
 toString()

@@ -218,7 +226,7 @@ extends Object


 ReplicationPeerConfig
-public ReplicationPeerConfig()
+public ReplicationPeerConfig()



@@ -235,7 +243,7 @@ extends Object


 setClusterKey
-public ReplicationPeerConfig setClusterKey(String clusterKey)
+public ReplicationPeerConfig setClusterKey(String clusterKey)
 Set the clusterKey which is the concatenation of the slave cluster's:
   hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent

@@ -246,7 +254,7 @@ extends Object


 setReplicationEndpointImpl

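A short usage sketch of the namespace support added above, assuming the fluent setters listed in the method summary; the peer cluster key and namespace name are placeholders.

import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;

public class PeerConfigExample {
  static ReplicationPeerConfig buildPeerConfig() {
    Set<String> namespaces = new HashSet<>();
    namespaces.add("ns1"); // replicate every table in namespace ns1
    return new ReplicationPeerConfig()
        .setClusterKey("zk1,zk2,zk3:2181:/hbase") // quorum:clientPort:znode.parent
        .setNamespaces(namespaces);
  }
}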
[02/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/JMXListener.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/JMXListener.html 
b/devapidocs/org/apache/hadoop/hbase/JMXListener.html
index f9a3a87..4f61da7 100644
--- a/devapidocs/org/apache/hadoop/hbase/JMXListener.html
+++ b/devapidocs/org/apache/hadoop/hbase/JMXListener.html
@@ -183,6 +183,10 @@ implements
 static String
 RMI_REGISTRY_PORT_CONF_KEY

+
+private Registry
+rmiRegistry
+



@@ -330,7 +334,7 @@ implements


-
+

 JMX_CS
 private static JMXConnectorServer JMX_CS
@@ -340,6 +344,15 @@ implements


+
+
+
+
+
+rmiRegistry
+private Registry rmiRegistry
+
+



@@ -371,7 +384,7 @@ implements


 buildJMXServiceURL
-public static JMXServiceURL buildJMXServiceURL(int rmiRegistryPort,
+public static JMXServiceURL buildJMXServiceURL(int rmiRegistryPort,
                                                int rmiConnectorPort)
                                         throws IOException

@@ -386,7 +399,7 @@ implements



 startConnectorServer
-public void startConnectorServer(int rmiRegistryPort,
+public void startConnectorServer(int rmiRegistryPort,
                                  int rmiConnectorPort)
                           throws IOException

@@ -401,7 +414,7 @@ implements



 stopConnectorServer
-public void stopConnectorServer()
+public void stopConnectorServer()
                          throws IOException

 Throws:
@@ -415,7 +428,7 @@ implements



 start
-public void start(CoprocessorEnvironment env)
+public void start(CoprocessorEnvironment env)
            throws IOException

 Specified by:
@@ -431,7 +444,7 @@ implements



 stop
-public void stop(CoprocessorEnvironment env)
+public void stop(CoprocessorEnvironment env)
          throws IOException

 Specified by:

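For context on buildJMXServiceURL above: a sketch of the two-port RMI service URL such a method assembles. The host handling and exact URL shape in the actual JMXListener may differ, so treat this as illustrative only.

import javax.management.remote.JMXServiceURL;

public class JmxUrlExample {
  static JMXServiceURL serviceUrl(int rmiRegistryPort, int rmiConnectorPort) throws Exception {
    // Connector port first, registry port in the JNDI path: clients talk to the
    // connector server while looking it up in the RMI registry.
    String url = "service:jmx:rmi://localhost:" + rmiConnectorPort
        + "/jndi/rmi://localhost:" + rmiRegistryPort + "/jmxrmi";
    return new JMXServiceURL(url);
  }
}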
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index 965a0e1..1aeb9d8 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
@@ -263,7 +263,7 @@ the order they are declared.
 
 
 values
-public static KeepDeletedCells[] values()
+public static KeepDeletedCells[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -283,7 +283,7 @@ for (KeepDeletedCells c : KeepDeletedCells.values())
 
 
 valueOf
-public static KeepDeletedCells valueOf(String name)
+public static KeepDeletedCells valueOf(String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/KeyValue.KVComparator.html
--
diff --git 

[50/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/acid-semantics.html
--
diff --git a/acid-semantics.html b/acid-semantics.html
index 381cd85..4879e2d 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Apache HBase (TM) ACID Properties
@@ -600,7 +600,7 @@ under the License. -->
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-09-16
+  Last Published: 
2016-09-29
 
 
 



[47/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/constant-values.html
--
diff --git a/apidocs/constant-values.html b/apidocs/constant-values.html
index 4c60407..07faf6f 100644
--- a/apidocs/constant-values.html
+++ b/apidocs/constant-values.html
@@ -493,1923 +493,1923 @@
 "hbase.bulkload.retries.number"
 
 
+public static final String BULKLOAD_STAGING_DIR_NAME = "staging"
+
 public static final String BYTES_PER_CHECKSUM = "hbase.hstore.bytes.per.checksum"
 public static final String CATALOG_FAMILY_STR = "info"
 public static final String CHECKSUM_TYPE_NAME = "hbase.hstore.checksum.algorithm"
 public static final String CIPHER_AES = "AES"
 public static final String CLIENT_PORT_STR = "clientPort"
 public static final String CLUSTER_DISTRIBUTED = "hbase.cluster.distributed"
 public static final String CLUSTER_ID_DEFAULT = "default-cluster"
 public static final String CLUSTER_ID_FILE_NAME = "hbase.id"
 public static final boolean CLUSTER_IS_DISTRIBUTED = true
 public static final boolean CLUSTER_IS_LOCAL = false
 public static final String COMPACTION_KV_MAX = "hbase.hstore.compaction.kv.max"
 public static final int COMPACTION_KV_MAX_DEFAULT = 10
 public static final String CONFIGURATION = "CONFIGURATION"
 public static final String CORRUPT_DIR_NAME = "corrupt"
 public static final String CP_HTD_ATTR_INCLUSION_KEY = "hbase.coprocessor.classloader.included.classes"
 public static final String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN = "[^=,]+"
 public static final String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN = "[^,]+"
 public static final String CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY = "hbase.crypto.alternate.key.algorithm"
 public static final String CRYPTO_CIPHERPROVIDER_CONF_KEY = "hbase.crypto.cipherprovider"
 public static final String CRYPTO_KEY_ALGORITHM_CONF_KEY = "hbase.crypto.key.algorithm"
 public static final String CRYPTO_KEYPROVIDER_CONF_KEY = "hbase.crypto.keyprovider"
 public static final String CRYPTO_KEYPROVIDER_PARAMETERS_KEY = "hbase.crypto.keyprovider.parameters"
 public static final String CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY = "hbase.crypto.master.alternate.key.name"
+
 
 
 

[08/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/ByteBufferedCell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ByteBufferedCell.html 
b/devapidocs/org/apache/hadoop/hbase/ByteBufferedCell.html
index 192136d..a1bd0d3 100644
--- a/devapidocs/org/apache/hadoop/hbase/ByteBufferedCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/ByteBufferedCell.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 Direct Known Subclasses:
-BufferedDataBlockEncoder.OffheapDecodedCell, ByteBufferedKeyOnlyKeyValue, CellUtil.EmptyByteBufferedCell, OffheapKeyValue, PrefixTreeCell, PrefixTreeSeeker.OffheapPrefixTreeCell
+BufferedDataBlockEncoder.OffheapDecodedCell, ByteBufferedKeyOnlyKeyValue, CellUtil.EmptyByteBufferedCell, KeyOnlyFilter.KeyOnlyByteBufferedCell, OffheapKeyValue, PrefixTreeCell, PrefixTreeSeeker.OffheapPrefixTreeCell
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/Cell.html
index e1c2ccb..d719a38 100644
--- a/devapidocs/org/apache/hadoop/hbase/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/Cell.html
@@ -100,8 +100,12 @@ var activeTableTab = "activeTableTab";
 
 
 
+All Known Subinterfaces:
+ExtendedCell
+
+
 All Known Implementing Classes:
-BufferedDataBlockEncoder.OffheapDecodedCell, BufferedDataBlockEncoder.OnheapDecodedCell, ByteBufferedCell, ByteBufferedKeyOnlyKeyValue, CellUtil.EmptyByteBufferedCell, CellUtil.EmptyCell, CellUtil.FirstOnRowByteBufferedCell, CellUtil.FirstOnRowCell, CellUtil.FirstOnRowColByteBufferedCell, CellUtil.FirstOnRowColCell, CellUtil.FirstOnRowColTSByteBufferedCell, CellUtil.FirstOnRowColTSCell, CellUtil.FirstOnRowDeleteFamilyCell, CellUtil.LastOnRowByteBufferedCell, CellUtil.LastOnRowCell, CellUtil.LastOnRowColByteBufferedCell, CellUtil.LastOnRowColCell, CellUtil.ShareableMemoryTagRewriteCell, CellUtil.TagRewriteCell, KeyValue, KeyValue.KeyOnlyKeyValue, KeyValueCodec.ByteBufferedKeyValueDecoder.ShareableMemoryKeyValue, KeyValueCodec.ByteBufferedKeyValueDecoder.ShareableMemoryNoTagsKeyValue, NoTagsKeyValue, OffheapKeyValue, PrefixTreeArrayReversibleScanner, PrefixTreeArrayScanner, PrefixTreeArraySearcher, PrefixTreeCell, PrefixTreeSeeker.OffheapPrefixTreeCell, PrefixTreeSeeker.OnheapPrefixTreeCell, RedundantKVGenerator.ExtendedOffheapKeyValue, SizeCachedKeyValue, SizeCachedNoTagsKeyValue
+BufferedDataBlockEncoder.OffheapDecodedCell, BufferedDataBlockEncoder.OnheapDecodedCell, ByteBufferedCell, ByteBufferedKeyOnlyKeyValue, CellUtil.EmptyByteBufferedCell, CellUtil.EmptyCell, CellUtil.FirstOnRowByteBufferedCell, CellUtil.FirstOnRowCell, CellUtil.FirstOnRowColByteBufferedCell, CellUtil.FirstOnRowColCell, CellUtil.FirstOnRowColTSByteBufferedCell, CellUtil.FirstOnRowColTSCell, CellUtil.FirstOnRowDeleteFamilyCell, CellUtil.LastOnRowByteBufferedCell, CellUtil.LastOnRowCell, CellUtil.LastOnRowColByteBufferedCell, CellUtil.LastOnRowColCell, CellUtil.ShareableMemoryTagRewriteCell, CellUtil.TagRewriteCell, KeyOnlyFilter.KeyOnlyByteBufferedCell, KeyOnlyFilter.KeyOnlyCell, KeyValue, KeyValue.KeyOnlyKeyValue, KeyValueCodec.ByteBufferedKeyValueDecoder.ShareableMemoryKeyValue, KeyValueCodec.ByteBufferedKeyValueDecoder.ShareableMemoryNoTagsKeyValue, NoTagsKeyValue, OffheapKeyValue, PrefixTreeArrayReversibleScanner, PrefixTreeArrayScanner, PrefixTreeArraySearcher, PrefixTreeCell, PrefixTreeSeeker.OffheapPrefixTreeCell, PrefixTreeSeeker.OnheapPrefixTreeCell, RedundantKVGenerator.ExtendedOffheapKeyValue, SizeCachedKeyValue, SizeCachedNoTagsKeyValue
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html 
b/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html
index 1326054..d7e71be 100644
--- a/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/CellUtil.EmptyByteBufferedCell.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-private abstract static class CellUtil.EmptyByteBufferedCell
+private abstract static class CellUtil.EmptyByteBufferedCell
 extends ByteBufferedCell
 implements SettableSequenceId
 
@@ -323,7 +323,7 @@ implements 
 
 EmptyByteBufferedCell
-private EmptyByteBufferedCell()
+private EmptyByteBufferedCell()
 
 
 
@@ -340,7 +340,7 @@ implements 
 
 setSequenceId
-public void setSequenceId(long seqId)
+public void setSequenceId(long seqId)
 Description copied from 
interface:SettableSequenceId
 Sets with the given seqId.
 
@@ -355,7 

[11/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index fe0c8a0..a60320f 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -732,1923 +732,1923 @@
 "hbase.bulkload.retries.number"
 
 
+public static final String BULKLOAD_STAGING_DIR_NAME = "staging"
+
 public static final String BYTES_PER_CHECKSUM = "hbase.hstore.bytes.per.checksum"
 public static final String CATALOG_FAMILY_STR = "info"
 public static final String CHECKSUM_TYPE_NAME = "hbase.hstore.checksum.algorithm"
 public static final String CIPHER_AES = "AES"
 public static final String CLIENT_PORT_STR = "clientPort"
 public static final String CLUSTER_DISTRIBUTED = "hbase.cluster.distributed"
 public static final String CLUSTER_ID_DEFAULT = "default-cluster"
 public static final String CLUSTER_ID_FILE_NAME = "hbase.id"
 public static final boolean CLUSTER_IS_DISTRIBUTED = true
 public static final boolean CLUSTER_IS_LOCAL = false
 public static final String COMPACTION_KV_MAX = "hbase.hstore.compaction.kv.max"
 public static final int COMPACTION_KV_MAX_DEFAULT = 10
 public static final String CONFIGURATION = "CONFIGURATION"
 public static final String CORRUPT_DIR_NAME = "corrupt"
 public static final String CP_HTD_ATTR_INCLUSION_KEY = "hbase.coprocessor.classloader.included.classes"
 public static final String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN = "[^=,]+"
 public static final String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN = "[^,]+"
 public static final String CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY = "hbase.crypto.alternate.key.algorithm"
 public static final String CRYPTO_CIPHERPROVIDER_CONF_KEY = "hbase.crypto.cipherprovider"
 public static final String CRYPTO_KEY_ALGORITHM_CONF_KEY = "hbase.crypto.key.algorithm"
 public static final String CRYPTO_KEYPROVIDER_CONF_KEY = "hbase.crypto.keyprovider"
 public static final String CRYPTO_KEYPROVIDER_PARAMETERS_KEY = "hbase.crypto.keyprovider.parameters"
 public static final String CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY = "hbase.crypto.master.alternate.key.name"
 
 
 

[44/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html 
b/apidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html
index 05d9d67..9e0f7cd 100644
--- a/apidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html
+++ b/apidocs/org/apache/hadoop/hbase/DoNotRetryIOException.html
@@ -127,7 +127,7 @@
 
 
 Direct Known Subclasses:
-AccessDeniedException, CoprocessorException, DoNotRetryRegionException, FailedSanityCheckException, FatalConnectionException, HBaseSnapshotException, InvalidFamilyOperationException, LeaseException, LockTimeoutException, NamespaceExistException, NamespaceNotFoundException, NoSuchColumnFamilyException, NotAllMetaRegionsOnlineException, OperationConflictException, QuotaExceededException, ScannerTimeoutException, ServerTooBusyException, TableExistsException, TableNotDisabledException, TableNotEnabledException, TableNotFoundException, UnknownProtocolException, UnknownScannerException
+AccessDeniedException, CoprocessorException, DoNotRetryRegionException, FailedSanityCheckException, FatalConnectionException, HBaseSnapshotException, InvalidFamilyOperationException, LeaseException, LockTimeoutException, NamespaceExistException, NamespaceNotFoundException, NoSuchColumnFamilyException, NotAllMetaRegionsOnlineException, OperationConflictException, QuotaExceededException, ScannerResetException, ScannerTimeoutException, ServerTooBusyException, TableExistsException, TableNotDisabledException, TableNotEnabledException, TableNotFoundException, UnknownProtocolException, UnknownScannerException
 
 
 



[46/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/deprecated-list.html
--
diff --git a/apidocs/deprecated-list.html b/apidocs/deprecated-list.html
index cc25efd..5d7de15 100644
--- a/apidocs/deprecated-list.html
+++ b/apidocs/deprecated-list.html
@@ -92,11 +92,16 @@
 
 
 
+org.apache.hadoop.hbase.util.Counter
+use java.util.concurrent.atomic.LongAdder instead.
+
+
+
 org.apache.hadoop.hbase.filter.FirstKeyValueMatchingQualifiersFilter
 Deprecated in 2.0. See 
HBASE-13347
 
 
-
+
 org.apache.hadoop.hbase.mapreduce.HLogInputFormat
 use WALInputFormat.  Remove in 
hadoop 3.0
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index 360aa33..47fe0bd 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -151,7 +151,9 @@
 add(byte[][])
 - Static method in class org.apache.hadoop.hbase.util.Bytes
 
 add(long) 
- Method in class org.apache.hadoop.hbase.util.Counter
-
+
+Deprecated.
+
 add(long,
 long) - Method in class org.apache.hadoop.hbase.util.FastLongHistogram
 
 Adds a value to the histogram.
@@ -394,6 +396,8 @@
 
 Define for 'return-all-versions'.
 
+ALWAYS_COPY_FILES
 - Static variable in class org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles
+
 AND - 
Static variable in class org.apache.hadoop.hbase.filter.ParseConstants
 
 AND Byte Array
@@ -720,7 +724,11 @@
 
 Maximum time to retry for a failed bulk load request
 
-bulkLoadPhase(Table, Connection, ExecutorService, Deque<LoadIncrementalHFiles.LoadQueueItem>, Multimap<ByteBuffer, LoadIncrementalHFiles.LoadQueueItem>) - Method in class org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles
+BULKLOAD_STAGING_DIR_NAME - Static variable in class org.apache.hadoop.hbase.HConstants
+
+Staging dir used by bulk load
+
+bulkLoadPhase(Table, Connection, ExecutorService, Deque<LoadIncrementalHFiles.LoadQueueItem>, Multimap<ByteBuffer, LoadIncrementalHFiles.LoadQueueItem>, boolean) - Method in class org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles
 
 This takes the LQI's grouped by likely regions and attempts 
to bulk load
  them.
@@ -1822,12 +1830,18 @@
 
 Counter - 
Class in org.apache.hadoop.hbase.util
 
-High scalable counter.
+Deprecated.
+use java.util.concurrent.atomic.LongAdder instead.
+
 
 Counter() 
- Constructor for class org.apache.hadoop.hbase.util.Counter
-
+
+Deprecated.
+
 Counter(long)
 - Constructor for class org.apache.hadoop.hbase.util.Counter
-
+
+Deprecated.
+
 CP_HTD_ATTR_INCLUSION_KEY
 - Static variable in class org.apache.hadoop.hbase.HConstants
 
 CP_HTD_ATTR_KEY_PATTERN
 - Static variable in class org.apache.hadoop.hbase.HConstants
@@ -2055,6 +2069,12 @@
 
 createFirstOnRow(byte[],
 int, short) - Static method in class org.apache.hadoop.hbase.CellUtil
 
+createFirstOnRow(byte[],
 byte[], byte[]) - Static method in class org.apache.hadoop.hbase.CellUtil
+
+createFirstOnRow(byte[],
 int, short, byte[], int, byte, byte[], int, int) - Static method in 
class org.apache.hadoop.hbase.CellUtil
+
+createFirstOnRow(byte[])
 - Static method in class org.apache.hadoop.hbase.CellUtil
+
 createFirstOnRowCol(Cell)
 - Static method in class org.apache.hadoop.hbase.CellUtil
 
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
@@ -2084,6 +2104,8 @@
 
 Create a Cell that is larger than all other possible Cells 
for the given Cell's row.
 
+createLastOnRow(byte[])
 - Static method in class org.apache.hadoop.hbase.CellUtil
+
 createLastOnRowCol(Cell)
 - Static method in class org.apache.hadoop.hbase.CellUtil
 
 Create a Cell that is larger than all other possible Cells 
for the given Cell's rk:cf:q.
@@ -2509,7 +2531,9 @@
 Attempts to decode Base64 data and deserialize a Java 
Object within.
 
 decrement()
 - Method in class org.apache.hadoop.hbase.util.Counter
-
+
+Deprecated.
+
 decrypt(byte[],
 int, InputStream, int, Decryptor) - Static method in class 
org.apache.hadoop.hbase.io.crypto.Encryption
 
 Decrypt a block of ciphertext read in from a stream with 
the given
@@ -3110,7 +3134,9 @@
 Takes a series of bytes and tries to generate an 
ForeignException instance for it.
 
 destroy() 
- Method in class org.apache.hadoop.hbase.util.Counter
-
+
+Deprecated.
+
 DFS_REPLICATION
 - Static variable in class org.apache.hadoop.hbase.HColumnDescriptor
 
 disablePeer(String)
 - Method in class org.apache.hadoop.hbase.client.replication.ReplicationAdmin
@@ -3148,7 +3174,12 @@
 Perform a bulk load of the given directory into the given
  pre-existing table.
 
-doBulkLoad(Path,
 Admin, 

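To ground the LoadIncrementalHFiles entries above: a sketch of driving a bulk load against a pre-existing table. The staging path and table name are placeholders; the new boolean copyFile in this change is threaded through the internal bulkLoadPhase/tryAtomicRegionLoad calls rather than set by this top-level entry point.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;

public class BulkLoadExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin();
         Table table = conn.getTable(TableName.valueOf("t1"));
         RegionLocator locator = conn.getRegionLocator(table.getName())) {
      // Loads HFiles written by a previous MapReduce job into the live table.
      new LoadIncrementalHFiles(conf).doBulkLoad(new Path("/staging/t1"), admin, table, locator);
    }
  }
}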
[39/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Connection.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
index bbc75bb..106f0d4 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
@@ -206,11 +206,12 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.


 protected void
-LoadIncrementalHFiles.bulkLoadPhase(Table table,
+LoadIncrementalHFiles.bulkLoadPhase(Table table,
      Connection conn,
      ExecutorService pool,
      Deque<org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem> queue,
-     com.google.common.collect.Multimap<ByteBuffer,org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem> regionGroups)
+     com.google.common.collect.Multimap<ByteBuffer,org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem> regionGroups,
+     boolean copyFile)
 This takes the LQI's grouped by likely regions and attempts to bulk load
  them.
@@ -232,11 +233,22 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.



+void
+LoadIncrementalHFiles.loadHFileQueue(Table table,
+      Connection conn,
+      Deque<org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem> queue,
+      Pair<byte[][],byte[][]> startEndKeys,
+      boolean copyFile)
+Used by the replication sink to load the hfiles from the source cluster.
+
+
+
 protected List<org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem>
-LoadIncrementalHFiles.tryAtomicRegionLoad(Connection conn,
+LoadIncrementalHFiles.tryAtomicRegionLoad(Connection conn,
       TableName tableName,
       byte[] first,
-      Collection<org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem> lqis)
+      Collection<org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem> lqis,
+      boolean copyFile)
 Attempts to do an atomic load of many hfiles into a region.
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
index 06be9bb..1edee8e 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
@@ -146,19 +146,19 @@ the order they are declared.
 
 
 
+Query
+Query.setConsistency(Consistency consistency)
+Sets the consistency level for this operation
+
+
 Get
 Get.setConsistency(Consistency consistency)

-
+
 Scan
 Scan.setConsistency(Consistency consistency)

-
-Query
-Query.setConsistency(Consistency consistency)
-Sets the consistency level for this operation
-Sets the consistency level for this operation
-
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
index 52f0603..27ee7c1 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
@@ -203,8 +203,8 @@ the order they are declared.
 Put.setDurability(Durability d)


-Append
-Append.setDurability(Durability d)
+Increment
+Increment.setDurability(Durability d)
 
 
 Mutation
@@ -213,8 +213,8 @@ the 

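As a usage note for the Consistency rows above: a small sketch, assuming a table with region replicas enabled; the row key is a placeholder. TIMELINE consistency allows reads to be served by secondary replicas at the cost of possibly stale data.

import org.apache.hadoop.hbase.client.Consistency;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ConsistencyExample {
  static Get timelineGet() {
    Get get = new Get(Bytes.toBytes("row1"));
    get.setConsistency(Consistency.TIMELINE); // may return stale data from a replica
    return get;
  }

  static Scan timelineScan() {
    Scan scan = new Scan();
    scan.setConsistency(Consistency.TIMELINE);
    return scan;
  }
}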
[01/52] [partial] hbase-site git commit: Published site at 63808a224c8689d07e55f90efd25f9597b0d04dd.

2016-09-29 Thread dimaspivak
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site e3ab1d1d0 -> a16440acb


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/044b3379/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html
index 36c1f51..9bd9af2 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class KeyValue.RowOnlyComparator
+public static class KeyValue.RowOnlyComparator
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">ComparatorKeyValue
 Comparator that compares row component only of a 
KeyValue.
@@ -215,7 +215,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparato
 
 
 comparator
-final KeyValue.KVComparator comparator
+final KeyValue.KVComparator comparator
 
 
 
@@ -232,7 +232,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparato
 
 
 RowOnlyComparator
-public RowOnlyComparator(KeyValue.KVComparator c)
+public RowOnlyComparator(KeyValue.KVComparator c)
 
 
 
@@ -249,7 +249,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparato
 
 
 compare
-public int compare(KeyValue left,
+public int compare(KeyValue left,
                    KeyValue right)
 
 Specified by:



hbase git commit: HBASE-15984 Handle premature EOF treatment of WALs in replication.

2016-09-29 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 e0066e713 -> 39a79d50f


HBASE-15984 Handle premature EOF treatment of WALs in replication.

In some particular deployments, the Replication code believes it has
reached EOF for a WAL prior to successfully parsing all bytes known to
exist in a cleanly closed file.

Consistently this failure happens due to an InvalidProtobufException
after some number of seeks during our attempts to tail the in-progress
RegionServer WAL. As a work-around, this patch treats cleanly closed
files differently than other execution paths. If an EOF is detected due
to parsing or other errors while there are still unparsed bytes before
the end-of-file trailer, we now reset the WAL to the very beginning and
attempt a clean read-through.

In current testing, a single such reset is sufficient to work around
observed dataloss. However, the above change will retry a given WAL file
indefinitely. On each such attempt, a log message like the below will
be emitted at the WARN level:

  Processing end of WAL file '{}'. At position {}, which is too far away
  from reported file length {}. Restarting WAL reading (see HBASE-15983
  for details).

Additionally, this patch adds some additional log detail at the TRACE
level about file offsets seen while handling recoverable errors. It also
adds metrics that measure the use of this recovery mechanism.
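For readers of this commit message, a simplified sketch of the recovery rule described above. It is not the patch itself; the types and names (WalStream, shouldRestart) are illustrative stand-ins for the ProtobufLogReader/ReplicationSource internals.

import java.io.IOException;

// Illustrative stand-ins for the real WAL reader and logger.
interface WalStream {
  String path();
  void seek(long offset) throws IOException;
}

public class PrematureEofSketch {
  static boolean shouldRestart(WalStream wal, long position, long fileLength,
      long trailerSize, boolean cleanlyClosed) throws IOException {
    long lastParseable = fileLength - trailerSize;
    if (cleanlyClosed && position < lastParseable) {
      // Unparsed bytes remain before the end-of-file trailer: treat this as a
      // recoverable parse failure rather than a true EOF.
      System.err.println("Processing end of WAL file '" + wal.path() + "'. At position "
          + position + ", which is too far away from reported file length " + fileLength
          + ". Restarting WAL reading (see HBASE-15983 for details).");
      wal.seek(0L); // rewind and attempt a clean read-through from the start
      return true;  // caller retries this WAL
    }
    return false;   // genuine EOF; advance to the next WAL in the queue
  }
}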

 Conflicts:

hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java

hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java

hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/39a79d50
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/39a79d50
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/39a79d50

Branch: refs/heads/branch-1.3
Commit: 39a79d50f1bde8ec54e08e7c249ba07562a30f63
Parents: e0066e7
Author: Sean Busbey 
Authored: Tue Jun 7 16:00:46 2016 -0500
Committer: Sean Busbey 
Committed: Thu Sep 29 13:01:43 2016 -0500

--
 .../MetricsReplicationSourceSource.java | 17 
 .../MetricsReplicationGlobalSourceSource.java   | 45 +++
 .../MetricsReplicationSourceSourceImpl.java | 81 
 .../regionserver/wal/ProtobufLogReader.java | 45 +--
 .../replication/regionserver/MetricsSource.java | 35 +
 .../regionserver/ReplicationSource.java | 39 --
 .../ReplicationWALReaderManager.java| 10 +++
 src/main/asciidoc/_chapters/ops_mgt.adoc| 24 +-
 8 files changed, 280 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/39a79d50/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
index 271f0ac..1ed5a6b 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
@@ -38,6 +38,16 @@ public interface MetricsReplicationSourceSource {
   public static final String SOURCE_SHIPPED_HFILES = "source.shippedHFiles";
   public static final String SOURCE_SIZE_OF_HFILE_REFS_QUEUE = 
"source.sizeOfHFileRefsQueue";
 
+  public static final String SOURCE_CLOSED_LOGS_WITH_UNKNOWN_LENGTH =
+      "source.closedLogsWithUnknownFileLength";
+  public static final String SOURCE_UNCLEANLY_CLOSED_LOGS = "source.uncleanlyClosedLogs";
+  public static final String SOURCE_UNCLEANLY_CLOSED_IGNORED_IN_BYTES =
+      "source.ignoredUncleanlyClosedLogContentsInBytes";
+  public static final String SOURCE_RESTARTED_LOG_READING = "source.restartedLogReading";
+  public static final String SOURCE_REPEATED_LOG_FILE_BYTES = "source.repeatedLogFileBytes";
+  public static final String SOURCE_COMPLETED_LOGS = "source.completedLogs";
+  public static final String SOURCE_COMPLETED_RECOVERY_QUEUES = "source.completedRecoverQueues";
+
   void setLastShippedAge(long age);
   void incrSizeOfLogQueue(int size);
   void decrSizeOfLogQueue(int size);
@@ -53,4 +63,11 @@ public interface MetricsReplicationSourceSource {
   void incrHFilesShipped(long hfiles);
   void 

[20/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb Shade our protobufs. Do it in a manner that makes it so we can still have in our API references to com.g

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java
--
diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java
 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java
new file mode 100644
index 000..aa8e31c
--- /dev/null
+++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java
@@ -0,0 +1,5419 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: Comparator.proto
+
+package org.apache.hadoop.hbase.shaded.protobuf.generated;
+
+public final class ComparatorProtos {
+  private ComparatorProtos() {}
+  public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface ComparatorOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+
+// required string name = 1;
+/**
+ * required string name = 1;
+ */
+boolean hasName();
+/**
+ * required string name = 1;
+ */
+java.lang.String getName();
+/**
+ * required string name = 1;
+ */
+com.google.protobuf.ByteString
+getNameBytes();
+
+// optional bytes serialized_comparator = 2;
+/**
+ * optional bytes serialized_comparator = 2;
+ */
+boolean hasSerializedComparator();
+/**
+ * optional bytes serialized_comparator = 2;
+ */
+com.google.protobuf.ByteString getSerializedComparator();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.Comparator}
+   */
+  public static final class Comparator extends
+  com.google.protobuf.GeneratedMessage
+  implements ComparatorOrBuilder {
+// Use Comparator.newBuilder() to construct.
+    private Comparator(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private Comparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final Comparator defaultInstance;
+public static Comparator getDefaultInstance() {
+  return defaultInstance;
+}
+
+public Comparator getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private Comparator(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  int mutable_bitField0_ = 0;
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+case 10: {
+  bitField0_ |= 0x0001;
+  name_ = input.readBytes();
+  break;
+}
+case 18: {
+  bitField0_ |= 0x0002;
+  serializedComparator_ = input.readBytes();
+  break;
+}
+  }
+}
+  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+throw e.setUnfinishedMessage(this);
+  } catch (java.io.IOException e) {
+throw new com.google.protobuf.InvalidProtocolBufferException(
+e.getMessage()).setUnfinishedMessage(this);
+  } finally {
+this.unknownFields = unknownFields.build();
+makeExtensionsImmutable();
+  }
+}
+public static final com.google.protobuf.Descriptors.Descriptor
+getDescriptor() {
+  return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_Comparator_descriptor;
+}
+
+protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+internalGetFieldAccessorTable() {
+  return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_Comparator_fieldAccessorTable
+  .ensureFieldAccessorsInitialized(
+  org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder.class);

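For reference, the parse loop above switches on raw wire tags rather than field numbers: protobuf packs each tag as (field_number << 3) | wire_type, and wire type 2 marks a length-delimited value, so field 1 (name) arrives as case 10 and field 2 (serialized_comparator) as case 18. A minimal sketch of that arithmetic, independent of the generated code (class and method names here are illustrative only):

public class WireTagMath {
  // Wire type 2 = length-delimited: strings, bytes, embedded messages.
  static final int LENGTH_DELIMITED = 2;

  static int makeTag(int fieldNumber, int wireType) {
    return (fieldNumber << 3) | wireType;
  }

  public static void main(String[] args) {
    System.out.println(makeTag(1, LENGTH_DELIMITED)); // 10 -> case 10: name
    System.out.println(makeTag(2, LENGTH_DELIMITED)); // 18 -> case 18: serialized_comparator
  }
}
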
[38/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb Shade our protobufs. Do it in a manner that makes it so we can still have in our API references to com.g

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java
--
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java
new file mode 100644
index 0000000..61b47ff
--- /dev/null
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java
@@ -0,0 +1,1277 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: ColumnAggregationProtocol.proto
+
+package org.apache.hadoop.hbase.coprocessor.protobuf.generated;
+
+public final class ColumnAggregationProtos {
+  private ColumnAggregationProtos() {}
+  public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface SumRequestOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+
+// required bytes family = 1;
+/**
+ * required bytes family = 1;
+ */
+boolean hasFamily();
+/**
+ * required bytes family = 1;
+ */
+com.google.protobuf.ByteString getFamily();
+
+// optional bytes qualifier = 2;
+/**
+ * optional bytes qualifier = 2;
+ */
+boolean hasQualifier();
+/**
+ * optional bytes qualifier = 2;
+ */
+com.google.protobuf.ByteString getQualifier();
+  }
+  /**
+   * Protobuf type {@code SumRequest}
+   */
+  public static final class SumRequest extends
+  com.google.protobuf.GeneratedMessage
+  implements SumRequestOrBuilder {
+// Use SumRequest.newBuilder() to construct.
+private SumRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+  super(builder);
+  this.unknownFields = builder.getUnknownFields();
+}
+private SumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final SumRequest defaultInstance;
+public static SumRequest getDefaultInstance() {
+  return defaultInstance;
+}
+
+public SumRequest getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private SumRequest(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  int mutable_bitField0_ = 0;
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+case 10: {
+  bitField0_ |= 0x0001;
+  family_ = input.readBytes();
+  break;
+}
+case 18: {
+  bitField0_ |= 0x0002;
+  qualifier_ = input.readBytes();
+  break;
+}
+  }
+}
+  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+throw e.setUnfinishedMessage(this);
+  } catch (java.io.IOException e) {
+throw new com.google.protobuf.InvalidProtocolBufferException(
+e.getMessage()).setUnfinishedMessage(this);
+  } finally {
+this.unknownFields = unknownFields.build();
+makeExtensionsImmutable();
+  }
+}
+public static final com.google.protobuf.Descriptors.Descriptor
+getDescriptor() {
+  return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor;
+}
+
+protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+internalGetFieldAccessorTable() {
+  return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable
+  .ensureFieldAccessorsInitialized(
+  org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class);
+}
+
+public static com.google.protobuf.Parser<SumRequest> PARSER =
+new com.google.protobuf.AbstractParser<SumRequest>() {

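SumRequest above carries a required family and an optional qualifier, both bytes. A short usage sketch of building and round-tripping it; the setFamily/setQualifier/hasQualifier accessors follow standard protoc naming for the fields shown, so treat the exact signatures as assumptions:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos;

public class SumRequestDemo {
  public static void main(String[] args) throws Exception {
    ColumnAggregationProtos.SumRequest req =
        ColumnAggregationProtos.SumRequest.newBuilder()
            .setFamily(ByteString.copyFromUtf8("f"))    // required bytes family = 1
            .setQualifier(ByteString.copyFromUtf8("q")) // optional bytes qualifier = 2
            .build();
    byte[] bytes = req.toByteArray();                   // wire format
    System.out.println(
        ColumnAggregationProtos.SumRequest.parseFrom(bytes).hasQualifier()); // true
  }
}
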
[29/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb Shade our protobufs. Do it in a manner that makes it so we can still have in our API references to com.g

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
--
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
new file mode 100644
index 0000000..7ef9b9c
--- /dev/null
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
@@ -0,0 +1,179 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
+import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ClientServiceCallable;
+import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.hadoop.hbase.client.RpcRetryingCaller;
+import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
+import org.junit.BeforeClass;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Tests bulk loading of HFiles with the old secure Endpoint client for backward compatibility.
+ * Will be removed when the old non-secure client kept for backward compatibility is no longer supported.
+ */
+@RunWith(Parameterized.class)
+@Category({RegionServerTests.class, LargeTests.class})
+public class TestHRegionServerBulkLoadWithOldSecureEndpoint extends TestHRegionServerBulkLoad {
+  public TestHRegionServerBulkLoadWithOldSecureEndpoint(int duration) {
+super(duration);
+  }
+
+  private static final Log LOG =
+  LogFactory.getLog(TestHRegionServerBulkLoadWithOldSecureEndpoint.class);
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws IOException {
+conf.setInt("hbase.rpc.timeout", 10 * 1000);
+conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
+  "org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint");
+  }
+
+  public static class AtomicHFileLoader extends RepeatingTestThread {
+final AtomicLong numBulkLoads = new AtomicLong();
+final AtomicLong numCompactions = new AtomicLong();
+private TableName tableName;
+
+public AtomicHFileLoader(TableName tableName, TestContext ctx,
+byte targetFamilies[][]) throws IOException {
+  super(ctx);
+  this.tableName = tableName;
+}
+
+public void doAnAction() throws Exception {
+  long iteration = numBulkLoads.getAndIncrement();
+  Path dir =  UTIL.getDataTestDirOnTestFS(String.format("bulkLoad_%08d",
+  iteration));
+
+  // create HFiles for different column families
+  FileSystem fs = UTIL.getTestFileSystem();
+  byte[] val = Bytes.toBytes(String.format("%010d", iteration));
+  final List<Pair<byte[], String>> famPaths = new ArrayList<Pair<byte[], String>>(NUM_CFS);
+  for (int i = 0; i < NUM_CFS; i++) {
+Path hfile = new Path(dir, family(i));
+byte[] fam = 

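The loader above follows the MultithreadedTestUtil pattern: subclass RepeatingTestThread and the TestContext drives doAnAction() in a loop until the test stops it. A stripped-down sketch of that shape, with the constructor and override signatures taken from the fragment and everything else illustrative:

import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;

public class CountingLoader extends RepeatingTestThread {
  private final AtomicLong iterations = new AtomicLong();

  public CountingLoader(TestContext ctx) throws java.io.IOException {
    super(ctx);
  }

  @Override
  public void doAnAction() throws Exception {
    // One bulk-load round per invocation; the framework handles the looping.
    iterations.getAndIncrement();
  }
}
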
[11/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb Shade our protobufs. Do it in a manner that makes it so we can still have in our API references to com.g

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
new file mode 100644
index 0000000..a758109
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
@@ -0,0 +1,22424 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: MasterProcedure.proto
+
+package org.apache.hadoop.hbase.shaded.protobuf.generated;
+
+public final class MasterProcedureProtos {
+  private MasterProcedureProtos() {}
+  public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistry registry) {
+  }
+  /**
+   * Protobuf enum {@code hbase.pb.CreateTableState}
+   */
+  public enum CreateTableState
+  implements com.google.protobuf.ProtocolMessageEnum {
+/**
+ * CREATE_TABLE_PRE_OPERATION = 1;
+ */
+CREATE_TABLE_PRE_OPERATION(0, 1),
+/**
+ * CREATE_TABLE_WRITE_FS_LAYOUT = 2;
+ */
+CREATE_TABLE_WRITE_FS_LAYOUT(1, 2),
+/**
+ * CREATE_TABLE_ADD_TO_META = 3;
+ */
+CREATE_TABLE_ADD_TO_META(2, 3),
+/**
+ * CREATE_TABLE_ASSIGN_REGIONS = 4;
+ */
+CREATE_TABLE_ASSIGN_REGIONS(3, 4),
+/**
+ * CREATE_TABLE_UPDATE_DESC_CACHE = 5;
+ */
+CREATE_TABLE_UPDATE_DESC_CACHE(4, 5),
+/**
+ * CREATE_TABLE_POST_OPERATION = 6;
+ */
+CREATE_TABLE_POST_OPERATION(5, 6),
+;
+
+/**
+ * CREATE_TABLE_PRE_OPERATION = 1;
+ */
+public static final int CREATE_TABLE_PRE_OPERATION_VALUE = 1;
+/**
+ * CREATE_TABLE_WRITE_FS_LAYOUT = 2;
+ */
+public static final int CREATE_TABLE_WRITE_FS_LAYOUT_VALUE = 2;
+/**
+ * CREATE_TABLE_ADD_TO_META = 3;
+ */
+public static final int CREATE_TABLE_ADD_TO_META_VALUE = 3;
+/**
+ * CREATE_TABLE_ASSIGN_REGIONS = 4;
+ */
+public static final int CREATE_TABLE_ASSIGN_REGIONS_VALUE = 4;
+/**
+ * CREATE_TABLE_UPDATE_DESC_CACHE = 5;
+ */
+public static final int CREATE_TABLE_UPDATE_DESC_CACHE_VALUE = 5;
+/**
+ * CREATE_TABLE_POST_OPERATION = 6;
+ */
+public static final int CREATE_TABLE_POST_OPERATION_VALUE = 6;
+
+
+public final int getNumber() { return value; }
+
+public static CreateTableState valueOf(int value) {
+  switch (value) {
+case 1: return CREATE_TABLE_PRE_OPERATION;
+case 2: return CREATE_TABLE_WRITE_FS_LAYOUT;
+case 3: return CREATE_TABLE_ADD_TO_META;
+case 4: return CREATE_TABLE_ASSIGN_REGIONS;
+case 5: return CREATE_TABLE_UPDATE_DESC_CACHE;
+case 6: return CREATE_TABLE_POST_OPERATION;
+default: return null;
+  }
+}
+
+public static com.google.protobuf.Internal.EnumLiteMap<CreateTableState>
+internalGetValueMap() {
+  return internalValueMap;
+}
+private static com.google.protobuf.Internal.EnumLiteMap<CreateTableState>
+internalValueMap =
+  new com.google.protobuf.Internal.EnumLiteMap<CreateTableState>() {
+public CreateTableState findValueByNumber(int number) {
+  return CreateTableState.valueOf(number);
+}
+  };
+
+public final com.google.protobuf.Descriptors.EnumValueDescriptor
+getValueDescriptor() {
+  return getDescriptor().getValues().get(index);
+}
+public final com.google.protobuf.Descriptors.EnumDescriptor
+getDescriptorForType() {
+  return getDescriptor();
+}
+public static final com.google.protobuf.Descriptors.EnumDescriptor
+getDescriptor() {
+  return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.getDescriptor().getEnumTypes().get(0);
+}
+
+private static final CreateTableState[] VALUES = values();
+
+public static CreateTableState valueOf(
+com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+  if (desc.getType() != getDescriptor()) {
+throw new java.lang.IllegalArgumentException(
+  "EnumValueDescriptor is not for this type.");
+  }
+  return VALUES[desc.getIndex()];
+}
+
+private final int index;
+private final int value;
+
+private CreateTableState(int index, int value) {
+  this.index = index;
+  this.value = value;
+}
+
+// @@protoc_insertion_point(enum_scope:hbase.pb.CreateTableState)
+  }
+
+  /**
+   * Protobuf enum {@code hbase.pb.ModifyTableState}
+   */
+  public enum ModifyTableState
+  implements com.google.protobuf.ProtocolMessageEnum {
+/**
+ * MODIFY_TABLE_PREPARE = 1;
+ */
+MODIFY_TABLE_PREPARE(0, 1),
+/**
+ * MODIFY_TABLE_PRE_OPERATION = 2;
+ */
+

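The generated enum keeps two integers per constant: a descriptor index and the proto field number, which is why each constant is declared as NAME(index, number). getNumber() returns the wire value and valueOf(int) maps it back, returning null for unknown numbers, as the switch above shows. A small usage sketch:

import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableState;

public class CreateTableStateDemo {
  public static void main(String[] args) {
    CreateTableState s = CreateTableState.CREATE_TABLE_ADD_TO_META;
    System.out.println(s.getNumber());                // 3, the proto number (not the ordinal 2)
    System.out.println(CreateTableState.valueOf(3));  // CREATE_TABLE_ADD_TO_META
    System.out.println(CreateTableState.valueOf(99)); // null for unknown wire values
  }
}
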
[41/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb Shade our protobufs. Do it in a manner that makes it so we can still have in our API references to com.g

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
--
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
index 426b6a7..d0b6317 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
@@ -76,6 +76,14 @@ public class TestInterfaceAudienceAnnotations {
 }
   }
 
+  class ShadedProtobufClassFilter implements ClassFinder.ClassFilter {
+@Override
+public boolean isCandidateClass(Class<?> c) {
+  return c.getPackage().getName().
+  contains("org.apache.hadoop.hbase.shaded.com.google.protobuf");
+}
+  }
+
  /** Selects classes with one of the {@link InterfaceAudience} annotation in their class
   * declaration.
*/
@@ -273,6 +281,7 @@ public class TestInterfaceAudienceAnnotations {
   new And(new PublicClassFilter(),
   new Not(new TestClassFilter()),
   new Not(new GeneratedClassFilter()),
+  new Not(new ShadedProtobufClassFilter()),
   new Not(new IsInterfaceStabilityClassFilter()),
   new Not(new InterfaceAudienceAnnotatedClassFilter()),
   new Not(new CloverInstrumentationFilter()))
@@ -312,6 +321,7 @@ public class TestInterfaceAudienceAnnotations {
   new And(new PublicClassFilter(),
   new Not(new TestClassFilter()),
   new Not(new GeneratedClassFilter()),
+  new Not(new ShadedProtobufClassFilter()),
   new InterfaceAudiencePublicAnnotatedClassFilter(),
   new Not(new IsInterfaceStabilityClassFilter()),
   new Not(new InterfaceStabilityAnnotatedClassFilter()))
@@ -355,6 +365,7 @@ public class TestInterfaceAudienceAnnotations {
 new Not((FileNameFilter) new TestFileNameFilter()),
 new And(new PublicClassFilter(), new Not(new TestClassFilter()),
 new Not(new GeneratedClassFilter()),
+new Not(new ShadedProtobufClassFilter()),
 new InterfaceAudiencePublicAnnotatedClassFilter()));
 Set<Class<?>> classes = classFinder.findClasses(false);
 return classes;

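The new ShadedProtobufClassFilter slots into the existing And(..., Not(...)) chains so relocated protobuf classes are skipped by the audience-annotation audit. A toy sketch of that predicate-composition pattern using local stand-ins (not the real ClassFinder API):

public class FilterComposition {
  interface ClassFilter { boolean isCandidateClass(Class<?> c); }

  static ClassFilter not(final ClassFilter f) {
    return new ClassFilter() {
      public boolean isCandidateClass(Class<?> c) { return !f.isCandidateClass(c); }
    };
  }

  static ClassFilter and(final ClassFilter... fs) {
    return new ClassFilter() {
      public boolean isCandidateClass(Class<?> c) {
        for (ClassFilter f : fs) {
          if (!f.isCandidateClass(c)) { return false; }
        }
        return true;
      }
    };
  }

  public static void main(String[] args) {
    ClassFilter shadedProtobuf = new ClassFilter() {
      public boolean isCandidateClass(Class<?> c) {
        return c.getName().contains(".shaded.com.google.protobuf");
      }
    };
    ClassFilter audit = and(not(shadedProtobuf)); // pass unless shaded protobuf
    System.out.println(audit.isCandidateClass(String.class)); // true
  }
}
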
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
--
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
index 838c40e..d7aa2f0 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
@@ -21,8 +21,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.client.backoff.ExponentialClientBackoffPolicy;
 import org.apache.hadoop.hbase.client.backoff.ServerStatistics;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;

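The one-line import moves above are the whole change for most client tests: internal code now compiles against the relocated (shaded) copies of the generated classes, leaving stock com.google.protobuf free for coprocessor endpoint users at whatever version they need. A trivial illustration, assuming hbase-protocol-shaded and protobuf-java are both on the classpath:

import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;

public class ShadedVsStock {
  public static void main(String[] args) {
    // Internal HBase code resolves the relocated generated classes...
    System.out.println(ClientProtos.class.getName());
    // ...while the un-relocated protobuf remains independently usable.
    System.out.println(com.google.protobuf.ByteString.EMPTY.size()); // 0
  }
}
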
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
--
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
index 1ece448..df6de42 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
@@ -49,35 +49,35 @@ import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.RegionTooBusyException;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse;
-import 

[35/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb Shade our protobufs. Do it in a manner that makes it so we can still have in our API references to com.g

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/DummyRegionServerEndpointProtos.java
--
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/DummyRegionServerEndpointProtos.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/DummyRegionServerEndpointProtos.java
new file mode 100644
index 0000000..a011b30
--- /dev/null
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/DummyRegionServerEndpointProtos.java
@@ -0,0 +1,1225 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: DummyRegionServerEndpoint.proto
+
+package org.apache.hadoop.hbase.coprocessor.protobuf.generated;
+
+public final class DummyRegionServerEndpointProtos {
+  private DummyRegionServerEndpointProtos() {}
+  public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface DummyRequestOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+  }
+  /**
+   * Protobuf type {@code hbase.test.pb.DummyRequest}
+   */
+  public static final class DummyRequest extends
+  com.google.protobuf.GeneratedMessage
+  implements DummyRequestOrBuilder {
+// Use DummyRequest.newBuilder() to construct.
+private DummyRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+  super(builder);
+  this.unknownFields = builder.getUnknownFields();
+}
+private DummyRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final DummyRequest defaultInstance;
+public static DummyRequest getDefaultInstance() {
+  return defaultInstance;
+}
+
+public DummyRequest getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private DummyRequest(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+  }
+}
+  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+throw e.setUnfinishedMessage(this);
+  } catch (java.io.IOException e) {
+throw new com.google.protobuf.InvalidProtocolBufferException(
+e.getMessage()).setUnfinishedMessage(this);
+  } finally {
+this.unknownFields = unknownFields.build();
+makeExtensionsImmutable();
+  }
+}
+public static final com.google.protobuf.Descriptors.Descriptor
+getDescriptor() {
+  return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_descriptor;
+}
+
+protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+internalGetFieldAccessorTable() {
+  return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable
+  .ensureFieldAccessorsInitialized(
+  org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.Builder.class);
+}
+
+public static com.google.protobuf.Parser<DummyRequest> PARSER =
+new com.google.protobuf.AbstractParser<DummyRequest>() {
+  public DummyRequest parsePartialFrom(
+  com.google.protobuf.CodedInputStream input,
+  com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+  throws com.google.protobuf.InvalidProtocolBufferException {
+return new DummyRequest(input, extensionRegistry);
+  }
+};
+
+@java.lang.Override
+public com.google.protobuf.Parser<DummyRequest> getParserForType() {
+  return PARSER;
+}
+
+private void initFields() {
+}
+private byte memoizedIsInitialized = -1;
+public final boolean isInitialized() {
+  byte isInitialized = 

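The static PARSER above is an anonymous AbstractParser whose parsePartialFrom simply calls the stream constructor, which gives callers the usual entry points. A short sketch; since DummyRequest has no fields, the round trip is trivially empty:

import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest;

public class ParserDemo {
  public static void main(String[] args) throws Exception {
    DummyRequest req = DummyRequest.getDefaultInstance();
    byte[] bytes = req.toByteArray(); // an empty message serializes to zero bytes
    DummyRequest parsed = DummyRequest.PARSER.parseFrom(bytes);
    System.out.println(bytes.length + " " + parsed.isInitialized()); // 0 true
  }
}
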
[36/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb Shade our protobufs. Do it in a manner that makes it so we can still have in our API references to com.g

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
--
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
new file mode 100644
index 0000000..b25f7aa
--- /dev/null
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java
@@ -0,0 +1,1283 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: ColumnAggregationNullResponseProtocol.proto
+
+package org.apache.hadoop.hbase.coprocessor.protobuf.generated;
+
+public final class ColumnAggregationWithNullResponseProtos {
+  private ColumnAggregationWithNullResponseProtos() {}
+  public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface ColumnAggregationNullResponseSumRequestOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+
+// required bytes family = 1;
+/**
+ * required bytes family = 1;
+ */
+boolean hasFamily();
+/**
+ * required bytes family = 1;
+ */
+com.google.protobuf.ByteString getFamily();
+
+// optional bytes qualifier = 2;
+/**
+ * optional bytes qualifier = 2;
+ */
+boolean hasQualifier();
+/**
+ * optional bytes qualifier = 2;
+ */
+com.google.protobuf.ByteString getQualifier();
+  }
+  /**
+   * Protobuf type {@code ColumnAggregationNullResponseSumRequest}
+   *
+   * <pre>
+   * use unique names for messages in ColumnAggregationXXX.protos due to a bug in
+   * protoc or hadoop's protoc compiler.
+   * </pre>
+   */
+  public static final class ColumnAggregationNullResponseSumRequest extends
+  com.google.protobuf.GeneratedMessage
+  implements ColumnAggregationNullResponseSumRequestOrBuilder {
+// Use ColumnAggregationNullResponseSumRequest.newBuilder() to construct.
+private ColumnAggregationNullResponseSumRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+  super(builder);
+  this.unknownFields = builder.getUnknownFields();
+}
+private ColumnAggregationNullResponseSumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final ColumnAggregationNullResponseSumRequest defaultInstance;
+public static ColumnAggregationNullResponseSumRequest getDefaultInstance() {
+  return defaultInstance;
+}
+
+public ColumnAggregationNullResponseSumRequest getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private ColumnAggregationNullResponseSumRequest(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  int mutable_bitField0_ = 0;
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+case 10: {
+  bitField0_ |= 0x0001;
+  family_ = input.readBytes();
+  break;
+}
+case 18: {
+  bitField0_ |= 0x0002;
+  qualifier_ = input.readBytes();
+  break;
+}
+  }
+}
+  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+throw e.setUnfinishedMessage(this);
+  } catch (java.io.IOException e) {
+throw new com.google.protobuf.InvalidProtocolBufferException(
+e.getMessage()).setUnfinishedMessage(this);
+  } finally {
+this.unknownFields = unknownFields.build();
+makeExtensionsImmutable();
+  }
+}
+public static final com.google.protobuf.Descriptors.Descriptor
+getDescriptor() {
+  return 

[12/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb Shade our protobufs. Do it in a manner that makes it so we can still have in our API references to com.g

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java
new file mode 100644
index 0000000..ee901f0
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java
@@ -0,0 +1,1737 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: MapReduce.proto
+
+package org.apache.hadoop.hbase.shaded.protobuf.generated;
+
+public final class MapReduceProtos {
+  private MapReduceProtos() {}
+  public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface ScanMetricsOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+
+// repeated .hbase.pb.NameInt64Pair metrics = 1;
+/**
+ * repeated .hbase.pb.NameInt64Pair metrics = 1;
+ */
+java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair>
+getMetricsList();
+/**
+ * repeated .hbase.pb.NameInt64Pair metrics = 1;
+ */
+org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index);
+/**
+ * repeated .hbase.pb.NameInt64Pair metrics = 1;
+ */
+int getMetricsCount();
+/**
+ * repeated .hbase.pb.NameInt64Pair metrics = 1;
+ */
+java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
+getMetricsOrBuilderList();
+/**
+ * repeated .hbase.pb.NameInt64Pair metrics = 1;
+ */
+org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
+int index);
+  }
+  /**
+   * Protobuf type {@code hbase.pb.ScanMetrics}
+   */
+  public static final class ScanMetrics extends
+  com.google.protobuf.GeneratedMessage
+  implements ScanMetricsOrBuilder {
+// Use ScanMetrics.newBuilder() to construct.
+private ScanMetrics(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+  super(builder);
+  this.unknownFields = builder.getUnknownFields();
+}
+private ScanMetrics(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final ScanMetrics defaultInstance;
+public static ScanMetrics getDefaultInstance() {
+  return defaultInstance;
+}
+
+public ScanMetrics getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private ScanMetrics(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  int mutable_bitField0_ = 0;
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+case 10: {
+  if (!((mutable_bitField0_ & 0x0001) == 0x0001)) {
+metrics_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair>();
+mutable_bitField0_ |= 0x0001;
+  }
+  metrics_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.PARSER, extensionRegistry));
+  break;
+}
+  }
+}
+  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+throw e.setUnfinishedMessage(this);
+  } catch (java.io.IOException e) {
+throw new com.google.protobuf.InvalidProtocolBufferException(
+e.getMessage()).setUnfinishedMessage(this);
+  } finally {
+if (((mutable_bitField0_ & 0x0001) == 0x0001)) {
+  metrics_ = java.util.Collections.unmodifiableList(metrics_);
+}
+this.unknownFields = unknownFields.build();
+makeExtensionsImmutable();
+  }
+}
+public static final com.google.protobuf.Descriptors.Descriptor
+getDescriptor() {
+  return org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_ScanMetrics_descriptor;
+}
+
+protected 

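For the repeated metrics field, the parse constructor lazily allocates an ArrayList on the first element, guarded by a bit in mutable_bitField0_, and seals it with Collections.unmodifiableList in the finally block. The same pattern in isolation, with no protobuf dependency:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class RepeatedFieldPattern {
  public static void main(String[] args) {
    List<String> metrics = null;
    boolean allocated = false;                // stands in for the mutable_bitField0_ bit
    for (String m : new String[] {"m1", "m2"}) {
      if (!allocated) {                       // first element: allocate lazily
        metrics = new ArrayList<String>();
        allocated = true;
      }
      metrics.add(m);
    }
    if (allocated) {                          // done in the finally block above
      metrics = Collections.unmodifiableList(metrics);
    }
    System.out.println(metrics);              // [m1, m2]
  }
}
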
[17/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb Shade our protobufs. Do it in a manner that makes it so we can still have in our API references to com.g

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java
new file mode 100644
index 0000000..74d3f86
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java
@@ -0,0 +1,1235 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: FS.proto
+
+package org.apache.hadoop.hbase.shaded.protobuf.generated;
+
+public final class FSProtos {
+  private FSProtos() {}
+  public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface HBaseVersionFileContentOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+
+// required string version = 1;
+/**
+ * required string version = 1;
+ */
+boolean hasVersion();
+/**
+ * required string version = 1;
+ */
+java.lang.String getVersion();
+/**
+ * required string version = 1;
+ */
+com.google.protobuf.ByteString
+getVersionBytes();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.HBaseVersionFileContent}
+   *
+   * <pre>
+   **
+   * The ${HBASE_ROOTDIR}/hbase.version file content
+   * </pre>
+   */
+  public static final class HBaseVersionFileContent extends
+  com.google.protobuf.GeneratedMessage
+  implements HBaseVersionFileContentOrBuilder {
+// Use HBaseVersionFileContent.newBuilder() to construct.
+private HBaseVersionFileContent(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+  super(builder);
+  this.unknownFields = builder.getUnknownFields();
+}
+private HBaseVersionFileContent(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final HBaseVersionFileContent defaultInstance;
+public static HBaseVersionFileContent getDefaultInstance() {
+  return defaultInstance;
+}
+
+public HBaseVersionFileContent getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private HBaseVersionFileContent(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  int mutable_bitField0_ = 0;
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+case 10: {
+  bitField0_ |= 0x0001;
+  version_ = input.readBytes();
+  break;
+}
+  }
+}
+  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+throw e.setUnfinishedMessage(this);
+  } catch (java.io.IOException e) {
+throw new com.google.protobuf.InvalidProtocolBufferException(
+e.getMessage()).setUnfinishedMessage(this);
+  } finally {
+this.unknownFields = unknownFields.build();
+makeExtensionsImmutable();
+  }
+}
+public static final com.google.protobuf.Descriptors.Descriptor
+getDescriptor() {
+  return org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.internal_static_hbase_pb_HBaseVersionFileContent_descriptor;
+}
+
+protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+internalGetFieldAccessorTable() {
+  return org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.internal_static_hbase_pb_HBaseVersionFileContent_fieldAccessorTable
+  .ensureFieldAccessorsInitialized(
+  org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class);
+}
+
+public static com.google.protobuf.Parser<HBaseVersionFileContent> PARSER =
+new com.google.protobuf.AbstractParser<HBaseVersionFileContent>() {
+  public HBaseVersionFileContent parsePartialFrom(
+  com.google.protobuf.CodedInputStream input,
+  

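HBaseVersionFileContent models the single-field payload written to ${HBASE_ROOTDIR}/hbase.version. A sketch of producing and reading that payload; setVersion/getVersion follow standard protoc naming for the version field, so treat the exact signatures as assumptions:

import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;

public class VersionFileDemo {
  public static void main(String[] args) throws Exception {
    FSProtos.HBaseVersionFileContent content =
        FSProtos.HBaseVersionFileContent.newBuilder()
            .setVersion("8") // required string version = 1
            .build();
    byte[] onDisk = content.toByteArray(); // what would land in hbase.version
    System.out.println(FSProtos.HBaseVersionFileContent.parseFrom(onDisk).getVersion()); // 8
  }
}
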
[28/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb Shade our protobufs. Do it in a manner that makes it so we can still have in our API references to com.g

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
--
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
index a363c2e..2ec4418 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
@@ -29,7 +29,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.StateMachineProcedureData;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData;
 
 /**
  * Procedure described by a series of steps.

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.java
--
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.java
index a60ba3f..4fea0d4 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreTracker.java
@@ -26,7 +26,7 @@ import java.util.TreeMap;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
 
 /**
  * Keeps track of live procedures.
@@ -278,7 +278,7 @@ public class ProcedureStoreTracker {
 
 /**
  * Convert to
- * {@link org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode}
+ * org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode
  * protobuf.
  */
 public ProcedureProtos.ProcedureStoreTracker.TrackerNode convert() {
@@ -768,7 +768,7 @@ public class ProcedureStoreTracker {
 
   /**
* Builds
-   * {@link org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureStoreTracker}
+   * org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureStoreTracker
* protocol buffer from current state.
*/
   public ProcedureProtos.ProcedureStoreTracker toProto() throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
--
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
index b9726a8..012ddeb 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
@@ -29,9 +29,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker;
-import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;
-import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureWALHeader;
-import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureWALTrailer;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer;
 
 /**
  * Describes a WAL File

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
--
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
index 5f726d0..e26e2d4 100644
--- 

[18/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb Shade our protobufs. Do it in a manner that makes it so we can still have in our API references to com.g

2016-09-29 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java
--
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java
new file mode 100644
index 0000000..16e6b11
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java
@@ -0,0 +1,2895 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: ErrorHandling.proto
+
+package org.apache.hadoop.hbase.shaded.protobuf.generated;
+
+public final class ErrorHandlingProtos {
+  private ErrorHandlingProtos() {}
+  public static void registerAllExtensions(
+  com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface StackTraceElementMessageOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+
+// optional string declaring_class = 1;
+/**
+ * optional string declaring_class = 1;
+ */
+boolean hasDeclaringClass();
+/**
+ * optional string declaring_class = 1;
+ */
+java.lang.String getDeclaringClass();
+/**
+ * optional string declaring_class = 1;
+ */
+com.google.protobuf.ByteString
+getDeclaringClassBytes();
+
+// optional string method_name = 2;
+/**
+ * optional string method_name = 2;
+ */
+boolean hasMethodName();
+/**
+ * optional string method_name = 2;
+ */
+java.lang.String getMethodName();
+/**
+ * optional string method_name = 2;
+ */
+com.google.protobuf.ByteString
+getMethodNameBytes();
+
+// optional string file_name = 3;
+/**
+ * optional string file_name = 3;
+ */
+boolean hasFileName();
+/**
+ * optional string file_name = 3;
+ */
+java.lang.String getFileName();
+/**
+ * optional string file_name = 3;
+ */
+com.google.protobuf.ByteString
+getFileNameBytes();
+
+// optional int32 line_number = 4;
+/**
+ * optional int32 line_number = 4;
+ */
+boolean hasLineNumber();
+/**
+ * optional int32 line_number = 4;
+ */
+int getLineNumber();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.StackTraceElementMessage}
+   *
+   * <pre>
+   **
+   * Protobuf version of a java.lang.StackTraceElement
+   * so we can serialize exceptions.
+   * </pre>
+   */
+  public static final class StackTraceElementMessage extends
+  com.google.protobuf.GeneratedMessage
+  implements StackTraceElementMessageOrBuilder {
+// Use StackTraceElementMessage.newBuilder() to construct.
+private StackTraceElementMessage(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+  super(builder);
+  this.unknownFields = builder.getUnknownFields();
+}
+private StackTraceElementMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final StackTraceElementMessage defaultInstance;
+public static StackTraceElementMessage getDefaultInstance() {
+  return defaultInstance;
+}
+
+public StackTraceElementMessage getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private StackTraceElementMessage(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  int mutable_bitField0_ = 0;
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+case 10: {
+  bitField0_ |= 0x0001;
+  declaringClass_ = input.readBytes();
+  break;
+}
+case 18: {
+  bitField0_ |= 0x0002;
+  methodName_ = input.readBytes();
+  break;
+}
+case 26: {
+  bitField0_ |= 0x0004;
+  fileName_ = input.readBytes();
+  break;
+}
+case 32: 

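StackTraceElementMessage mirrors java.lang.StackTraceElement field for field so exceptions can be shipped across the wire. A conversion sketch; the builder setters follow standard protoc naming for the four fields above, so treat the exact signatures as assumptions:

import org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos;

public class StackTraceCapture {
  public static void main(String[] args) {
    StackTraceElement el = new Throwable().getStackTrace()[0];
    ErrorHandlingProtos.StackTraceElementMessage.Builder b =
        ErrorHandlingProtos.StackTraceElementMessage.newBuilder()
            .setDeclaringClass(el.getClassName())
            .setMethodName(el.getMethodName())
            .setLineNumber(el.getLineNumber());
    if (el.getFileName() != null) { // optional: file name may be absent
      b.setFileName(el.getFileName());
    }
    System.out.println(b.build().getMethodName()); // main
  }
}
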