hbase git commit: HBASE-15181 adds TestCompactionPolicy which was missing in first commit

2016-02-26 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master f7f96b9fb -> 03ffb30ef


HBASE-15181 adds TestCompactionPolicy which was missing in first commit


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/03ffb30e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/03ffb30e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/03ffb30e

Branch: refs/heads/master
Commit: 03ffb30efe341c226a19b4e80ec0e3352e55806c
Parents: f7f96b9
Author: tedyu 
Authored: Fri Feb 26 19:58:33 2016 -0800
Committer: tedyu 
Committed: Fri Feb 26 19:58:33 2016 -0800

--
 .../regionserver/TestCompactionPolicy.java  | 207 +++
 1 file changed, 207 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/03ffb30e/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
new file mode 100644
index 000..f5f0926
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
@@ -0,0 +1,207 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver;
+
+import com.google.common.collect.Lists;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
+import org.apache.hadoop.hbase.regionserver.compactions.RatioBasedCompactionPolicy;
+import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestCompactionPolicy {
+  private final static Log LOG = LogFactory.getLog(TestDefaultCompactSelection.class);
+  protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+  protected Configuration conf;
+  protected HStore store;
+  private static final String DIR = TEST_UTIL.getDataTestDir(
+TestDefaultCompactSelection.class.getSimpleName()).toString();
+  protected static Path TEST_FILE;
+  protected static final int minFiles = 3;
+  protected static final int maxFiles = 5;
+
+  protected static final long minSize = 10;
+  protected static final long maxSize = 2100;
+
+  private FSHLog hlog;
+  private HRegion region;
+
+  @Before
+  public void setUp() throws Exception {
+config();
+initialize();
+  }
+
+  /**
+   * setup config values necessary for store
+   */
+  protected void config() {
+this.conf = TEST_UTIL.getConfiguration();
+this.conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, 0);
+this.conf.setInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MIN_KEY, minFiles);
+this.conf.setInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MAX_KEY, maxFiles);
+this.conf.setLong(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MIN_SIZE_KEY, minSize);
+this.conf.setLong(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MAX_SIZE_KEY, maxSize);
+   

[1/2] hbase git commit: HBASE-15130 Backport 0.98 Scan different TimeRange for each column family

2016-02-26 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/0.98 2538b9236 -> 5db9aba3a


http://git-wip-us.apache.org/repos/asf/hbase/blob/5db9aba3/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
--
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
index 9c0447e..0fe5d3e 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
@@ -6593,6 +6593,668 @@ public final class HBaseProtos {
 // @@protoc_insertion_point(class_scope:TimeRange)
   }
 
+  public interface ColumnFamilyTimeRangeOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+
+// required bytes column_family = 1;
+/**
+ * required bytes column_family = 1;
+ */
+boolean hasColumnFamily();
+/**
+ * required bytes column_family = 1;
+ */
+com.google.protobuf.ByteString getColumnFamily();
+
+// required .TimeRange time_range = 2;
+/**
+ * required .TimeRange time_range = 2;
+ */
+boolean hasTimeRange();
+/**
+ * required .TimeRange time_range = 2;
+ */
+org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
+/**
+ * required .TimeRange time_range = 2;
+ */
+org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();
+  }
+  /**
+   * Protobuf type {@code ColumnFamilyTimeRange}
+   *
+   * 
+   * ColumnFamily Specific TimeRange
+   * 
+   */
+  public static final class ColumnFamilyTimeRange extends
+  com.google.protobuf.GeneratedMessage
+  implements ColumnFamilyTimeRangeOrBuilder {
+// Use ColumnFamilyTimeRange.newBuilder() to construct.
+private ColumnFamilyTimeRange(com.google.protobuf.GeneratedMessage.Builder builder) {
+  super(builder);
+  this.unknownFields = builder.getUnknownFields();
+}
+private ColumnFamilyTimeRange(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final ColumnFamilyTimeRange defaultInstance;
+public static ColumnFamilyTimeRange getDefaultInstance() {
+  return defaultInstance;
+}
+
+public ColumnFamilyTimeRange getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private ColumnFamilyTimeRange(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  int mutable_bitField0_ = 0;
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+case 10: {
+  bitField0_ |= 0x0001;
+  columnFamily_ = input.readBytes();
+  break;
+}
+case 18: {
+  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
+  if (((bitField0_ & 0x0002) == 0x0002)) {
+subBuilder = timeRange_.toBuilder();
+  }
+  timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
+  if (subBuilder != null) {
+subBuilder.mergeFrom(timeRange_);
+timeRange_ = subBuilder.buildPartial();
+  }
+  bitField0_ |= 0x0002;
+  break;
+}
+  }
+}
+  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+throw e.setUnfinishedMessage(this);
+  } catch (java.io.IOException e) {
+throw new com.google.protobuf.InvalidProtocolBufferException(
+e.getMessage()).setUnfinishedMessage(this);
+  } finally {
+this.unknownFields = unknownFields.build();
+makeExtensionsImmutable();
+  }
+}
+public static final com.google.protobuf.Descriptors.Descriptor
+getDescriptor() {
+  return 

[2/2] hbase git commit: HBASE-15130 Backport 0.98 Scan different TimeRange for each column family

2016-02-26 Thread apurtell
HBASE-15130 Backport 0.98 Scan different TimeRange for each column family

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5db9aba3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5db9aba3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5db9aba3

Branch: refs/heads/0.98
Commit: 5db9aba3a339b9f7e5eced21c6da32951e116763
Parents: 2538b92
Author: Rahul Gidwani 
Authored: Wed Feb 17 14:16:23 2016 -0800
Committer: Andrew Purtell 
Committed: Fri Feb 26 15:12:13 2016 -0800

--
 .../org/apache/hadoop/hbase/client/Get.java |  17 +
 .../org/apache/hadoop/hbase/client/Query.java   |  42 +
 .../org/apache/hadoop/hbase/client/Scan.java|  12 +
 .../hadoop/hbase/protobuf/ProtobufUtil.java | 104 +-
 .../org/apache/hadoop/hbase/io/TimeRange.java   |  16 +-
 .../hbase/protobuf/generated/ClientProtos.java  | 965 +--
 .../hbase/protobuf/generated/HBaseProtos.java   | 717 +-
 hbase-protocol/src/main/protobuf/Client.proto   |   2 +
 hbase-protocol/src/main/protobuf/HBase.proto|   6 +
 .../hbase/regionserver/KeyValueScanner.java |   5 +-
 .../hadoop/hbase/regionserver/MemStore.java |  17 +-
 .../regionserver/NonLazyKeyValueScanner.java|   3 +-
 .../hbase/regionserver/ScanQueryMatcher.java|   7 +-
 .../hadoop/hbase/regionserver/StoreFile.java|   7 +-
 .../hbase/regionserver/StoreFileScanner.java|  16 +-
 .../hadoop/hbase/regionserver/StoreScanner.java |   2 +-
 .../hbase/io/hfile/TestHFileWriterV2.java   |   2 +-
 .../regionserver/TestCompoundBloomFilter.java   |  12 +-
 .../hadoop/hbase/regionserver/TestMemStore.java |  36 +-
 .../hbase/regionserver/TestStoreFile.java   |  58 +-
 20 files changed, 1832 insertions(+), 214 deletions(-)
--
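The client-facing piece of this backport is the new setColumnFamilyTimeRange(byte[] cf, long minStamp, long maxStamp) method on Get (and, per the file list, on Scan and Query). A minimal usage sketch against the 0.98 client API follows; the table, family, and timestamps are illustrative only, and only the Get variant is confirmed by the hunk shown below.

// Illustrative sketch; names and timestamps are made up.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class ColumnFamilyTimeRangeExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (HTable table = new HTable(conf, TableName.valueOf("demo_table"))) {
      Get get = new Get(Bytes.toBytes("row-1"));
      // Restrict only family 'cf1' to [minStamp, maxStamp); other families keep the default range.
      get.setColumnFamilyTimeRange(Bytes.toBytes("cf1"), 1456000000000L, 1456500000000L);
      Result result = table.get(get);
      System.out.println(result);
    }
  }
}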


http://git-wip-us.apache.org/repos/asf/hbase/blob/5db9aba3/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
index 0abc09d..590aa5a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
@@ -120,6 +120,10 @@ public class Get extends Query
 for (Map.Entry attr : get.getAttributesMap().entrySet()) {
   setAttribute(attr.getKey(), attr.getValue());
 }
+for (Map.Entry entry : get.getColumnFamilyTimeRange().entrySet()) {
+  TimeRange tr = entry.getValue();
+  setColumnFamilyTimeRange(entry.getKey(), tr.getMin(), tr.getMax());
+}
   }
 
   public boolean isCheckExistenceOnly() {
@@ -227,6 +231,19 @@ public class Get extends Query
   }
 
   /**
+   * Get versions of columns only within the specified timestamp range and column family,
+   * [cf, minStamp, maxStamp).
+   * @param cf the column family to restrict
+   * @param minStamp minimum timestamp value, inclusive
+   * @param maxStamp maximum timestamp value, exclusive
+   * @throws IOException if invalid time range
+   * @return this for invocation chaining
+   */
+  public Get setColumnFamilyTimeRange(byte[] cf, long minStamp, long maxStamp) {
+return (Get) super.setColumnFamilyTimeRange(cf, minStamp, maxStamp);
+  }
+
+  /*
* Set the maximum number of values to return per row per Column Family
* @param limit the maximum number of values returned / row / CF
* @return this for invocation chaining

http://git-wip-us.apache.org/repos/asf/hbase/blob/5db9aba3/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
index 423e401..79762ac 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
@@ -17,12 +17,15 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import java.io.IOException;
 import java.util.Map;
 
+import com.google.common.collect.Maps;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.security.access.AccessControlConstants;
 import 

hbase git commit: HBASE-15181 A simple implementation of date based tiered compaction (Clara Xiong)

2016-02-26 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 99955a324 -> f7f96b9fb


HBASE-15181 A simple implementation of date based tiered compaction (Clara Xiong)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f7f96b9f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f7f96b9f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f7f96b9f

Branch: refs/heads/master
Commit: f7f96b9fb70f5b2243558cf531ab7fa51162e656
Parents: 99955a3
Author: tedyu 
Authored: Fri Feb 26 17:36:23 2016 -0800
Committer: tedyu 
Committed: Fri Feb 26 17:36:23 2016 -0800

--
 .../hadoop/hbase/regionserver/StoreFile.java|   7 +
 .../compactions/CompactionConfiguration.java|  82 +-
 .../compactions/DateTieredCompactionPolicy.java | 294 +++
 .../compactions/RatioBasedCompactionPolicy.java |  18 +-
 .../hbase/regionserver/MockStoreFile.java   |  12 +
 .../regionserver/TestDateTieredCompaction.java  | 211 +
 .../TestDefaultCompactSelection.java| 187 +---
 7 files changed, 622 insertions(+), 189 deletions(-)
--
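The CompactionConfiguration hunk below introduces the date-tiered compaction property names. A hedged sketch of setting them on a Configuration follows; the property strings are taken verbatim from that hunk, while the numeric values are arbitrary examples rather than recommended settings.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class DateTieredCompactionConfigSketch {
  public static Configuration configure() {
    Configuration conf = HBaseConfiguration.create();
    // Property names come from the CompactionConfiguration hunk below; values are examples only.
    conf.setLong("hbase.hstore.compaction.date.tiered.max.storefile.age.millis",
        365L * 24 * 60 * 60 * 1000);          // stop compacting windows older than roughly a year
    conf.setLong("hbase.hstore.compaction.date.tiered.base.window.millis",
        6L * 60 * 60 * 1000);                 // 6-hour base window
    conf.setInt("hbase.hstore.compaction.date.tiered.windows.per.tier", 4);
    conf.setInt("hbase.hstore.compaction.date.tiered.incoming.window.min", 6);
    return conf;
  }
}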


http://git-wip-us.apache.org/repos/asf/hbase/blob/f7f96b9f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
index 4ced556..61eb9b8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
@@ -768,6 +768,13 @@ public class StoreFile {
 getReader().timeRangeTracker.getMinimumTimestamp();
   }
 
+  public Long getMaximumTimestamp() {
+return (getReader().timeRangeTracker == null) ?
+null :
+getReader().timeRangeTracker.getMaximumTimestamp();
+  }
+
+
   /**
* Gets the approximate mid-point of this file that is optimal for use in splitting it.
* @param comparator Comparator used to compare KVs.

http://git-wip-us.apache.org/repos/asf/hbase/blob/f7f96b9f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
index 633477e..9bb4c77 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.regionserver.compactions;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
 
 /**
@@ -67,6 +67,23 @@ public class CompactionConfiguration {
   public static final String HBASE_HFILE_COMPACTION_DISCHARGER_THREAD_COUNT =
   "hbase.hfile.compaction.discharger.thread.count";
 
+  /*
+   * The epoch time length for the windows we no longer compact
+   */
+  public static final String MAX_AGE_MILLIS_KEY =
+"hbase.hstore.compaction.date.tiered.max.storefile.age.millis";
+  public static final String BASE_WINDOW_MILLIS_KEY =
+"hbase.hstore.compaction.date.tiered.base.window.millis";
+  public static final String WINDOWS_PER_TIER_KEY =
+"hbase.hstore.compaction.date.tiered.windows.per.tier";
+  public static final String INCOMING_WINDOW_MIN_KEY =
+"hbase.hstore.compaction.date.tiered.incoming.window.min";
+  public static final String COMPACTION_POLICY_CLASS_FOR_TIERED_WINDOWS_KEY =
+"hbase.hstore.compaction.date.tiered.window.policy.class";
+
+  private static final Class
+DEFAULT_TIER_COMPACTION_POLICY_CLASS = ExploringCompactionPolicy.class;
+
   Configuration conf;
   StoreConfigInformation storeConfigInfo;
 
@@ -75,13 +92,19 @@ public class CompactionConfiguration {
   private final long maxCompactSize;
   private final long offPeakMaxCompactSize;
   private final long minCompactSize;
-  private final int minFilesToCompact;
+  /** This one can be update **/
+  private int minFilesToCompact;
   private final int maxFilesToCompact;
   private final double compactionRatio;
   private final long 

[4/4] hbase git commit: HBASE-15128 Disable region splits and merges switch in master

2016-02-26 Thread chenheng
HBASE-15128 Disable region splits and merges switch in master


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/99955a32
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/99955a32
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/99955a32

Branch: refs/heads/master
Commit: 99955a3240c5032daae471cacebe595134f71fc3
Parents: 793babf
Author: chenheng 
Authored: Sat Feb 27 08:36:59 2016 +0800
Committer: chenheng 
Committed: Sat Feb 27 08:36:59 2016 +0800

--
 .../org/apache/hadoop/hbase/client/Admin.java   |   27 +-
 .../hbase/client/ConnectionImplementation.java  |   14 +
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |   31 +
 .../hadoop/hbase/protobuf/RequestConverter.java |   49 +
 .../hbase/zookeeper/ZooKeeperWatcher.java   |   10 +
 .../hbase/protobuf/generated/MasterProtos.java  | 4304 ++
 .../protobuf/generated/SnapshotProtos.java  |  500 +-
 .../protobuf/generated/ZooKeeperProtos.java |  462 +-
 hbase-protocol/src/main/protobuf/Master.proto   |   36 +
 .../src/main/protobuf/ZooKeeper.proto   |7 +
 .../hadoop/hbase/master/AssignmentManager.java  |   10 +
 .../org/apache/hadoop/hbase/master/HMaster.java |   28 +
 .../hadoop/hbase/master/MasterRpcServices.java  |   42 +
 .../org/apache/hadoop/hbase/util/HBaseFsck.java |   37 +
 .../hbase/zookeeper/SplitOrMergeTracker.java|  151 +
 .../hbase/client/TestSplitOrMergeStatus.java|  198 +
 hbase-shell/src/main/ruby/hbase/admin.rb|   32 +
 hbase-shell/src/main/ruby/shell.rb  |2 +
 .../ruby/shell/commands/splitormerge_enabled.rb |   41 +
 .../ruby/shell/commands/splitormerge_switch.rb  |   43 +
 20 files changed, 4824 insertions(+), 1200 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/99955a32/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index d7b52d5..c3b524b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -1678,11 +1678,28 @@ public interface Admin extends Abortable, Closeable {
   List getSecurityCapabilities() throws IOException;
 
   /**
+   * Turn the Split or Merge switches on or off.
+   *
+   * @param enabled enabled or not
+   * @param synchronous If true, it waits until current split() call, if outstanding, to return.
+   * @param switchTypes switchType list {@link MasterSwitchType}
+   * @return Previous switch value array
+   */
+  boolean[] setSplitOrMergeEnabled(final boolean enabled, final boolean synchronous,
+   final MasterSwitchType... switchTypes) throws IOException;
+
+  /**
+   * Query the current state of the switch
+   *
+   * @return true if the switch is enabled, false otherwise.
+   */
+  boolean isSplitOrMergeEnabled(final MasterSwitchType switchType) throws IOException;
+
+  /**
* Currently, there are only two compact types:
* {@code NORMAL} means do store files compaction;
* {@code MOB} means do mob files compaction.
* */
-
   @InterfaceAudience.Public
   @InterfaceStability.Unstable
   public enum CompactType {
@@ -1692,4 +1709,12 @@ public interface Admin extends Abortable, Closeable {
 
 CompactType(int value) {}
   }
+  
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
+  public enum MasterSwitchType {
+SPLIT,
+MERGE
+  }
+
 }
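A hedged usage sketch of the Admin methods added above; the ConnectionFactory bootstrap and the choice to flip both switches are illustrative, not part of the commit. Per the javadoc above, the returned array holds the previous value of each switch in argument order.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class SplitOrMergeSwitchSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      // Turn both switches off; the returned array holds each switch's previous value.
      boolean[] previous = admin.setSplitOrMergeEnabled(false, false,
          Admin.MasterSwitchType.SPLIT, Admin.MasterSwitchType.MERGE);
      System.out.println("previous SPLIT=" + previous[0] + ", MERGE=" + previous[1]);

      // Query the current state and turn the switches back on afterwards.
      if (!admin.isSplitOrMergeEnabled(Admin.MasterSwitchType.SPLIT)) {
        admin.setSplitOrMergeEnabled(true, false,
            Admin.MasterSwitchType.SPLIT, Admin.MasterSwitchType.MERGE);
      }
    }
  }
}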

http://git-wip-us.apache.org/repos/asf/hbase/blob/99955a32/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index dfa9937..64eb9fb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -1742,6 +1742,20 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
   }
 
   @Override
+  public MasterProtos.SetSplitOrMergeEnabledResponse setSplitOrMergeEnabled(
+RpcController controller, MasterProtos.SetSplitOrMergeEnabledRequest request)
+throws ServiceException {
+return stub.setSplitOrMergeEnabled(controller, request);
+  }
+
+  @Override
+  public MasterProtos.IsSplitOrMergeEnabledResponse isSplitOrMergeEnabled(
+   

[2/4] hbase git commit: HBASE-15128 Disable region splits and merges switch in master

2016-02-26 Thread chenheng
http://git-wip-us.apache.org/repos/asf/hbase/blob/99955a32/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
--
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
index 8dbb5ad..9805d50 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
@@ -11,13 +11,13 @@ public final class SnapshotProtos {
   public interface SnapshotFileInfoOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
-// required .SnapshotFileInfo.Type type = 1;
+// required .hbase.pb.SnapshotFileInfo.Type type = 1;
 /**
- * required .SnapshotFileInfo.Type type = 1;
+ * required .hbase.pb.SnapshotFileInfo.Type type = 1;
  */
 boolean hasType();
 /**
- * required .SnapshotFileInfo.Type type = 1;
+ * required .hbase.pb.SnapshotFileInfo.Type type = 1;
  */
 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type 
getType();
 
@@ -67,7 +67,7 @@ public final class SnapshotProtos {
 getWalNameBytes();
   }
   /**
-   * Protobuf type {@code SnapshotFileInfo}
+   * Protobuf type {@code hbase.pb.SnapshotFileInfo}
*/
   public static final class SnapshotFileInfo extends
   com.google.protobuf.GeneratedMessage
@@ -157,12 +157,12 @@ public final class SnapshotProtos {
 }
 public static final com.google.protobuf.Descriptors.Descriptor
 getDescriptor() {
-  return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_descriptor;
+  return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor;
 }
 
 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
 internalGetFieldAccessorTable() {
-  return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_fieldAccessorTable
+  return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable
   .ensureFieldAccessorsInitialized(
   
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.class,
 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Builder.class);
 }
@@ -183,7 +183,7 @@ public final class SnapshotProtos {
 }
 
 /**
- * Protobuf enum {@code SnapshotFileInfo.Type}
+ * Protobuf enum {@code hbase.pb.SnapshotFileInfo.Type}
  */
 public enum Type
 implements com.google.protobuf.ProtocolMessageEnum {
@@ -261,21 +261,21 @@ public final class SnapshotProtos {
 this.value = value;
   }
 
-  // @@protoc_insertion_point(enum_scope:SnapshotFileInfo.Type)
+  // @@protoc_insertion_point(enum_scope:hbase.pb.SnapshotFileInfo.Type)
 }
 
 private int bitField0_;
-// required .SnapshotFileInfo.Type type = 1;
+// required .hbase.pb.SnapshotFileInfo.Type type = 1;
 public static final int TYPE_FIELD_NUMBER = 1;
 private 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type 
type_;
 /**
- * required .SnapshotFileInfo.Type type = 1;
+ * required .hbase.pb.SnapshotFileInfo.Type type = 1;
  */
 public boolean hasType() {
   return ((bitField0_ & 0x0001) == 0x0001);
 }
 /**
- * required .SnapshotFileInfo.Type type = 1;
+ * required .hbase.pb.SnapshotFileInfo.Type type = 1;
  */
 public 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type 
getType() {
   return type_;
@@ -613,19 +613,19 @@ public final class SnapshotProtos {
   return builder;
 }
 /**
- * Protobuf type {@code SnapshotFileInfo}
+ * Protobuf type {@code hbase.pb.SnapshotFileInfo}
  */
 public static final class Builder extends
 com.google.protobuf.GeneratedMessage.Builder
implements 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfoOrBuilder
 {
   public static final com.google.protobuf.Descriptors.Descriptor
   getDescriptor() {
-return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_descriptor;
+return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor;
   }
 
   protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
   internalGetFieldAccessorTable() {
-return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_fieldAccessorTable
+return 

[1/4] hbase git commit: HBASE-15128 Disable region splits and merges switch in master

2016-02-26 Thread chenheng
Repository: hbase
Updated Branches:
  refs/heads/master 793babf4a -> 99955a324


http://git-wip-us.apache.org/repos/asf/hbase/blob/99955a32/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
--
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
index 4371739..0240a67 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
@@ -8196,6 +8196,450 @@ public final class ZooKeeperProtos {
 // @@protoc_insertion_point(class_scope:hbase.pb.TableLock)
   }
 
+  public interface SwitchStateOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+
+// optional bool enabled = 1;
+/**
+ * optional bool enabled = 1;
+ */
+boolean hasEnabled();
+/**
+ * optional bool enabled = 1;
+ */
+boolean getEnabled();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.SwitchState}
+   *
+   * 
+   **
+   * State of the switch.
+   * 
+   */
+  public static final class SwitchState extends
+  com.google.protobuf.GeneratedMessage
+  implements SwitchStateOrBuilder {
+// Use SwitchState.newBuilder() to construct.
+private SwitchState(com.google.protobuf.GeneratedMessage.Builder 
builder) {
+  super(builder);
+  this.unknownFields = builder.getUnknownFields();
+}
+private SwitchState(boolean noInit) { this.unknownFields = 
com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final SwitchState defaultInstance;
+public static SwitchState getDefaultInstance() {
+  return defaultInstance;
+}
+
+public SwitchState getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private SwitchState(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  int mutable_bitField0_ = 0;
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+case 8: {
+  bitField0_ |= 0x0001;
+  enabled_ = input.readBool();
+  break;
+}
+  }
+}
+  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+throw e.setUnfinishedMessage(this);
+  } catch (java.io.IOException e) {
+throw new com.google.protobuf.InvalidProtocolBufferException(
+e.getMessage()).setUnfinishedMessage(this);
+  } finally {
+this.unknownFields = unknownFields.build();
+makeExtensionsImmutable();
+  }
+}
+public static final com.google.protobuf.Descriptors.Descriptor
+getDescriptor() {
+  return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_descriptor;
+}
+
+protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+internalGetFieldAccessorTable() {
+  return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_fieldAccessorTable
+  .ensureFieldAccessorsInitialized(
+  
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.class, 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.Builder.class);
+}
+
+public static com.google.protobuf.Parser PARSER =
+new com.google.protobuf.AbstractParser() {
+  public SwitchState parsePartialFrom(
+  com.google.protobuf.CodedInputStream input,
+  com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+  throws com.google.protobuf.InvalidProtocolBufferException {
+return new SwitchState(input, extensionRegistry);
+  }
+};
+
+@java.lang.Override
+public com.google.protobuf.Parser getParserForType() {
+  return PARSER;
+}
+
+private int bitField0_;
+// optional bool enabled = 1;
+public static final int 

[3/4] hbase git commit: HBASE-15128 Disable region splits and merges switch in master

2016-02-26 Thread chenheng
http://git-wip-us.apache.org/repos/asf/hbase/blob/99955a32/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
--
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
index 043d549..073eba9 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
@@ -8,6 +8,88 @@ public final class MasterProtos {
   public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
   }
+  /**
+   * Protobuf enum {@code hbase.pb.MasterSwitchType}
+   */
+  public enum MasterSwitchType
+  implements com.google.protobuf.ProtocolMessageEnum {
+/**
+ * SPLIT = 0;
+ */
+SPLIT(0, 0),
+/**
+ * MERGE = 1;
+ */
+MERGE(1, 1),
+;
+
+/**
+ * SPLIT = 0;
+ */
+public static final int SPLIT_VALUE = 0;
+/**
+ * MERGE = 1;
+ */
+public static final int MERGE_VALUE = 1;
+
+
+public final int getNumber() { return value; }
+
+public static MasterSwitchType valueOf(int value) {
+  switch (value) {
+case 0: return SPLIT;
+case 1: return MERGE;
+default: return null;
+  }
+}
+
+public static com.google.protobuf.Internal.EnumLiteMap
+internalGetValueMap() {
+  return internalValueMap;
+}
+private static com.google.protobuf.Internal.EnumLiteMap
+internalValueMap =
+  new com.google.protobuf.Internal.EnumLiteMap() {
+public MasterSwitchType findValueByNumber(int number) {
+  return MasterSwitchType.valueOf(number);
+}
+  };
+
+public final com.google.protobuf.Descriptors.EnumValueDescriptor
+getValueDescriptor() {
+  return getDescriptor().getValues().get(index);
+}
+public final com.google.protobuf.Descriptors.EnumDescriptor
+getDescriptorForType() {
+  return getDescriptor();
+}
+public static final com.google.protobuf.Descriptors.EnumDescriptor
+getDescriptor() {
+  return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.getDescriptor().getEnumTypes().get(0);
+}
+
+private static final MasterSwitchType[] VALUES = values();
+
+public static MasterSwitchType valueOf(
+com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+  if (desc.getType() != getDescriptor()) {
+throw new java.lang.IllegalArgumentException(
+  "EnumValueDescriptor is not for this type.");
+  }
+  return VALUES[desc.getIndex()];
+}
+
+private final int index;
+private final int value;
+
+private MasterSwitchType(int index, int value) {
+  this.index = index;
+  this.value = value;
+}
+
+// @@protoc_insertion_point(enum_scope:hbase.pb.MasterSwitchType)
+  }
+
   public interface AddColumnRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
@@ -28764,28 +28846,62 @@ public final class MasterProtos {
 // @@protoc_insertion_point(class_scope:hbase.pb.IsBalancerEnabledResponse)
   }
 
-  public interface NormalizeRequestOrBuilder
+  public interface SetSplitOrMergeEnabledRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
+
+// required bool enabled = 1;
+/**
+ * required bool enabled = 1;
+ */
+boolean hasEnabled();
+/**
+ * required bool enabled = 1;
+ */
+boolean getEnabled();
+
+// optional bool synchronous = 2;
+/**
+ * optional bool synchronous = 2;
+ */
+boolean hasSynchronous();
+/**
+ * optional bool synchronous = 2;
+ */
+boolean getSynchronous();
+
+// repeated .hbase.pb.MasterSwitchType switch_types = 3;
+/**
+ * repeated .hbase.pb.MasterSwitchType switch_types = 3;
+ */
+
java.util.List
 getSwitchTypesList();
+/**
+ * repeated .hbase.pb.MasterSwitchType switch_types = 3;
+ */
+int getSwitchTypesCount();
+/**
+ * repeated .hbase.pb.MasterSwitchType switch_types = 3;
+ */
+org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType 
getSwitchTypes(int index);
   }
   /**
-   * Protobuf type {@code hbase.pb.NormalizeRequest}
+   * Protobuf type {@code hbase.pb.SetSplitOrMergeEnabledRequest}
*/
-  public static final class NormalizeRequest extends
+  public static final class SetSplitOrMergeEnabledRequest extends
   com.google.protobuf.GeneratedMessage
-  implements NormalizeRequestOrBuilder {
-// Use NormalizeRequest.newBuilder() to construct.
-private NormalizeRequest(com.google.protobuf.GeneratedMessage.Builder 
builder) {
+  implements SetSplitOrMergeEnabledRequestOrBuilder {
+// Use 

[2/4] hbase git commit: HBASE-15290 Hbase Rest CheckAndAPI should save other cells along with compared cell (Ajith)

2016-02-26 Thread enis
HBASE-15290 Hbase Rest CheckAndAPI should save other cells along with compared cell (Ajith)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d233e09c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d233e09c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d233e09c

Branch: refs/heads/branch-1
Commit: d233e09c1479ab0e46adf7bfc27cb890b493ae23
Parents: 46ffa85
Author: Enis Soztutar 
Authored: Fri Feb 26 15:05:59 2016 -0800
Committer: Enis Soztutar 
Committed: Fri Feb 26 15:06:05 2016 -0800

--
 .../apache/hadoop/hbase/rest/RowResource.java   | 32 +++--
 .../hadoop/hbase/rest/RowResourceBase.java  | 39 +--
 .../hbase/rest/TestGetAndPutResource.java   | 69 
 3 files changed, 129 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d233e09c/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
--
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index dad5a32..39a4128 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -455,20 +455,40 @@ public class RowResource extends ResourceBase {
   byte[][] valueToPutParts = KeyValue.parseColumn(valueToCheckColumn);
   if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
 CellModel valueToPutCell = null;
+
+// Copy all the cells to the Put request
+// and track if the check cell's latest value is also sent
 for (int i = 0, n = cellModelCount - 1; i < n ; i++) {
-  if(Bytes.equals(cellModels.get(i).getColumn(),
-  valueToCheckCell.getColumn())) {
-valueToPutCell = cellModels.get(i);
-break;
+  CellModel cell = cellModels.get(i);
+  byte[] col = cell.getColumn();
+
+  if (col == null) {
+servlet.getMetrics().incrementFailedPutRequests(1);
+return Response.status(Response.Status.BAD_REQUEST)
+.type(MIMETYPE_TEXT).entity("Bad request: Column found to be null." + CRLF)
+.build();
+  }
+
+  byte [][] parts = KeyValue.parseColumn(col);
+
+  if (parts.length != 2) {
+return Response.status(Response.Status.BAD_REQUEST)
+.type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+.build();
+  }
+  put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
+
+  if(Bytes.equals(col,
+  valueToCheckCell.getColumn())) {
+valueToPutCell = cell;
   }
 }
+
 if (valueToPutCell == null) {
   servlet.getMetrics().incrementFailedPutRequests(1);
  return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
  .entity("Bad request: The column to put and check do not match." + CRLF).build();
 } else {
-  put.addImmutable(valueToPutParts[0], valueToPutParts[1], valueToPutCell.getTimestamp(),
-valueToPutCell.getValue());
   retValue = table.checkAndPut(key, valueToPutParts[0], valueToPutParts[1],
 valueToCheckCell.getValue(), put);
 }
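The REST resource above ultimately drives the ordinary Table.checkAndPut call visible in the hunk. For context, a hedged, purely illustrative sketch of the equivalent client-side semantics (a multi-cell Put checked against one column) with made-up table, family, and values:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndPutSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("rest_demo"))) {
      byte[] row = Bytes.toBytes("row-1");
      byte[] family = Bytes.toBytes("cf");
      // A Put carrying several cells; the REST resource now copies all of them
      // into the Put before issuing the check on a single column.
      Put put = new Put(row);
      put.addColumn(family, Bytes.toBytes("a"), Bytes.toBytes("new-a"));
      put.addColumn(family, Bytes.toBytes("b"), Bytes.toBytes("new-b"));
      boolean applied = table.checkAndPut(row, family, Bytes.toBytes("a"),
          Bytes.toBytes("expected-a"), put);
      System.out.println("applied=" + applied);
    }
  }
}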

http://git-wip-us.apache.org/repos/asf/hbase/blob/d233e09c/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
--
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
index 0e74b46..c88bd4c 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
+import java.util.*;
 
 import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
@@ -29,6 +30,7 @@ import javax.xml.bind.JAXBException;
 import javax.xml.bind.Marshaller;
 import javax.xml.bind.Unmarshaller;
 
+import org.apache.commons.collections.keyvalue.AbstractMapEntry;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -228,13 +230,22 @@ public class RowResourceBase {
   }
 
   protected static Response checkAndPutValuePB(String url, String table,
-  String row, 

[4/4] hbase git commit: HBASE-15290 Hbase Rest CheckAndAPI should save other cells along with compared cell (Ajith)

2016-02-26 Thread enis
HBASE-15290 Hbase Rest CheckAndAPI should save other cells along with compared cell (Ajith)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/12a3d441
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/12a3d441
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/12a3d441

Branch: refs/heads/branch-1.1
Commit: 12a3d441ea0ca0825cc65c60e70bae6028bfcd5b
Parents: 41efb92
Author: Enis Soztutar 
Authored: Fri Feb 26 15:05:59 2016 -0800
Committer: Enis Soztutar 
Committed: Fri Feb 26 15:09:06 2016 -0800

--
 .../apache/hadoop/hbase/rest/RowResource.java   | 32 +++--
 .../hadoop/hbase/rest/RowResourceBase.java  | 39 +--
 .../hbase/rest/TestGetAndPutResource.java   | 69 
 3 files changed, 129 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/12a3d441/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
--
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index dad5a32..39a4128 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -455,20 +455,40 @@ public class RowResource extends ResourceBase {
   byte[][] valueToPutParts = KeyValue.parseColumn(valueToCheckColumn);
   if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
 CellModel valueToPutCell = null;
+
+// Copy all the cells to the Put request
+// and track if the check cell's latest value is also sent
 for (int i = 0, n = cellModelCount - 1; i < n ; i++) {
-  if(Bytes.equals(cellModels.get(i).getColumn(),
-  valueToCheckCell.getColumn())) {
-valueToPutCell = cellModels.get(i);
-break;
+  CellModel cell = cellModels.get(i);
+  byte[] col = cell.getColumn();
+
+  if (col == null) {
+servlet.getMetrics().incrementFailedPutRequests(1);
+return Response.status(Response.Status.BAD_REQUEST)
+.type(MIMETYPE_TEXT).entity("Bad request: Column found to 
be null." + CRLF)
+.build();
+  }
+
+  byte [][] parts = KeyValue.parseColumn(col);
+
+  if (parts.length != 2) {
+return Response.status(Response.Status.BAD_REQUEST)
+.type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+.build();
+  }
+  put.addImmutable(parts[0], parts[1], cell.getTimestamp(), 
cell.getValue());
+
+  if(Bytes.equals(col,
+  valueToCheckCell.getColumn())) {
+valueToPutCell = cell;
   }
 }
+
 if (valueToPutCell == null) {
   servlet.getMetrics().incrementFailedPutRequests(1);
   return 
Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
   .entity("Bad request: The column to put and check do not match." 
+ CRLF).build();
 } else {
-  put.addImmutable(valueToPutParts[0], valueToPutParts[1], 
valueToPutCell.getTimestamp(),
-valueToPutCell.getValue());
   retValue = table.checkAndPut(key, valueToPutParts[0], 
valueToPutParts[1],
 valueToCheckCell.getValue(), put);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/12a3d441/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
--
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
index 0e74b46..c88bd4c 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
+import java.util.*;
 
 import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
@@ -29,6 +30,7 @@ import javax.xml.bind.JAXBException;
 import javax.xml.bind.Marshaller;
 import javax.xml.bind.Unmarshaller;
 
+import org.apache.commons.collections.keyvalue.AbstractMapEntry;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -228,13 +230,22 @@ public class RowResourceBase {
   }
 
   protected static Response checkAndPutValuePB(String url, String table,
-  String row, 

[3/4] hbase git commit: HBASE-15290 Hbase Rest CheckAndAPI should save other cells along with compared cell (Ajith)

2016-02-26 Thread enis
HBASE-15290 Hbase Rest CheckAndAPI should save other cells along with compared cell (Ajith)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/61852848
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/61852848
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/61852848

Branch: refs/heads/branch-1.2
Commit: 61852848cdc3dd0172f730eeb6e31b2b0ba861ed
Parents: 31b12fd
Author: Enis Soztutar 
Authored: Fri Feb 26 15:05:59 2016 -0800
Committer: Enis Soztutar 
Committed: Fri Feb 26 15:07:33 2016 -0800

--
 .../apache/hadoop/hbase/rest/RowResource.java   | 32 +++--
 .../hadoop/hbase/rest/RowResourceBase.java  | 39 +--
 .../hbase/rest/TestGetAndPutResource.java   | 69 
 3 files changed, 129 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/61852848/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
--
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index dad5a32..39a4128 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -455,20 +455,40 @@ public class RowResource extends ResourceBase {
   byte[][] valueToPutParts = KeyValue.parseColumn(valueToCheckColumn);
   if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
 CellModel valueToPutCell = null;
+
+// Copy all the cells to the Put request
+// and track if the check cell's latest value is also sent
 for (int i = 0, n = cellModelCount - 1; i < n ; i++) {
-  if(Bytes.equals(cellModels.get(i).getColumn(),
-  valueToCheckCell.getColumn())) {
-valueToPutCell = cellModels.get(i);
-break;
+  CellModel cell = cellModels.get(i);
+  byte[] col = cell.getColumn();
+
+  if (col == null) {
+servlet.getMetrics().incrementFailedPutRequests(1);
+return Response.status(Response.Status.BAD_REQUEST)
+.type(MIMETYPE_TEXT).entity("Bad request: Column found to 
be null." + CRLF)
+.build();
+  }
+
+  byte [][] parts = KeyValue.parseColumn(col);
+
+  if (parts.length != 2) {
+return Response.status(Response.Status.BAD_REQUEST)
+.type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+.build();
+  }
+  put.addImmutable(parts[0], parts[1], cell.getTimestamp(), 
cell.getValue());
+
+  if(Bytes.equals(col,
+  valueToCheckCell.getColumn())) {
+valueToPutCell = cell;
   }
 }
+
 if (valueToPutCell == null) {
   servlet.getMetrics().incrementFailedPutRequests(1);
   return 
Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
   .entity("Bad request: The column to put and check do not match." 
+ CRLF).build();
 } else {
-  put.addImmutable(valueToPutParts[0], valueToPutParts[1], 
valueToPutCell.getTimestamp(),
-valueToPutCell.getValue());
   retValue = table.checkAndPut(key, valueToPutParts[0], 
valueToPutParts[1],
 valueToCheckCell.getValue(), put);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/61852848/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
--
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
index 0e74b46..c88bd4c 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
+import java.util.*;
 
 import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
@@ -29,6 +30,7 @@ import javax.xml.bind.JAXBException;
 import javax.xml.bind.Marshaller;
 import javax.xml.bind.Unmarshaller;
 
+import org.apache.commons.collections.keyvalue.AbstractMapEntry;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -228,13 +230,22 @@ public class RowResourceBase {
   }
 
   protected static Response checkAndPutValuePB(String url, String table,
-  String row, 

[1/4] hbase git commit: HBASE-15290 Hbase Rest CheckAndAPI should save other cells along with compared cell (Ajith)

2016-02-26 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/branch-1 46ffa8598 -> d233e09c1
  refs/heads/branch-1.1 41efb9233 -> 12a3d441e
  refs/heads/branch-1.2 31b12fda0 -> 61852848c
  refs/heads/master 8f6e29785 -> 793babf4a


HBASE-15290 Hbase Rest CheckAndAPI should save other cells along with compared cell (Ajith)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/793babf4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/793babf4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/793babf4

Branch: refs/heads/master
Commit: 793babf4a4c0156f9e712a2bbf9578e2a1d6e1e4
Parents: 8f6e297
Author: Enis Soztutar 
Authored: Fri Feb 26 15:05:59 2016 -0800
Committer: Enis Soztutar 
Committed: Fri Feb 26 15:05:59 2016 -0800

--
 .../apache/hadoop/hbase/rest/RowResource.java   | 32 +++--
 .../hadoop/hbase/rest/RowResourceBase.java  | 39 +--
 .../hbase/rest/TestGetAndPutResource.java   | 69 
 3 files changed, 129 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/793babf4/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
--
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index f922343..bac4edb 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -455,20 +455,40 @@ public class RowResource extends ResourceBase {
   byte[][] valueToPutParts = KeyValue.parseColumn(valueToCheckColumn);
   if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
 CellModel valueToPutCell = null;
+
+// Copy all the cells to the Put request
+// and track if the check cell's latest value is also sent
 for (int i = 0, n = cellModelCount - 1; i < n ; i++) {
-  if(Bytes.equals(cellModels.get(i).getColumn(),
-  valueToCheckCell.getColumn())) {
-valueToPutCell = cellModels.get(i);
-break;
+  CellModel cell = cellModels.get(i);
+  byte[] col = cell.getColumn();
+
+  if (col == null) {
+servlet.getMetrics().incrementFailedPutRequests(1);
+return Response.status(Response.Status.BAD_REQUEST)
+.type(MIMETYPE_TEXT).entity("Bad request: Column found to 
be null." + CRLF)
+.build();
+  }
+
+  byte [][] parts = KeyValue.parseColumn(col);
+
+  if (parts.length != 2) {
+return Response.status(Response.Status.BAD_REQUEST)
+.type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+.build();
+  }
+  put.addImmutable(parts[0], parts[1], cell.getTimestamp(), 
cell.getValue());
+
+  if(Bytes.equals(col,
+  valueToCheckCell.getColumn())) {
+valueToPutCell = cell;
   }
 }
+
 if (valueToPutCell == null) {
   servlet.getMetrics().incrementFailedPutRequests(1);
   return 
Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
   .entity("Bad request: The column to put and check do not match." 
+ CRLF).build();
 } else {
-  put.addImmutable(valueToPutParts[0], valueToPutParts[1], 
valueToPutCell.getTimestamp(),
-valueToPutCell.getValue());
   retValue = table.checkAndPut(key, valueToPutParts[0], 
valueToPutParts[1],
 valueToCheckCell.getValue(), put);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/793babf4/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
--
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
index 48cebb2..b2fc0a6 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
+import java.util.*;
 
 import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
@@ -29,6 +30,7 @@ import javax.xml.bind.JAXBException;
 import javax.xml.bind.Marshaller;
 import javax.xml.bind.Unmarshaller;
 
+import org.apache.commons.collections.keyvalue.AbstractMapEntry;
 import org.apache.hadoop.conf.Configuration;
 import 

[06/37] hbase git commit: HBASE-15306 Make RPC call queue length dynamically configurable

2016-02-26 Thread syuanjiang
HBASE-15306 Make RPC call queue length dynamically configurable


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f47dba74
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f47dba74
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f47dba74

Branch: refs/heads/hbase-12439
Commit: f47dba74d498d5d39f124ad8ea5723c437acbc85
Parents: 58283fa
Author: Mikhail Antonov 
Authored: Tue Feb 23 14:20:40 2016 -0800
Committer: Mikhail Antonov 
Committed: Tue Feb 23 14:20:40 2016 -0800

--
 .../hbase/ipc/BalancedQueueRpcExecutor.java | 11 +-
 .../hadoop/hbase/ipc/RWQueueRpcExecutor.java| 19 +-
 .../apache/hadoop/hbase/ipc/RpcExecutor.java| 11 ++
 .../org/apache/hadoop/hbase/ipc/RpcServer.java  |  3 ++
 .../hadoop/hbase/ipc/SimpleRpcScheduler.java| 18 -
 .../hbase/ipc/TestSimpleRpcScheduler.java   | 39 
 6 files changed, 97 insertions(+), 4 deletions(-)
--
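The pattern in the hunks below is to size the underlying queues at a fixed hard limit while enforcing a separately held, updatable soft limit at dispatch time. A stripped-down, hedged sketch of that idea follows; the class and field names here are illustrative, not the actual HBase ones.

import java.util.concurrent.LinkedBlockingQueue;

// Illustrative only: a queue created with a generous hard capacity whose effective
// length is a volatile soft limit that can change at runtime, mirroring the
// dispatch-time check added in the executors below.
public class SoftLimitedQueue<T> {
  private final LinkedBlockingQueue<T> queue;
  private volatile int currentQueueLimit;

  public SoftLimitedQueue(int initialLimit, int hardLimit) {
    this.queue = new LinkedBlockingQueue<>(Math.max(initialLimit, hardLimit));
    this.currentQueueLimit = initialLimit;
  }

  // Reject the task once the soft limit is reached, as the dispatch() hunks below do.
  public boolean dispatch(T task) {
    if (queue.size() >= currentQueueLimit) {
      return false;
    }
    return queue.offer(task);
  }

  // The soft limit can later be raised or lowered without rebuilding the queue.
  public void setQueueLimit(int newLimit) {
    this.currentQueueLimit = newLimit;
  }
}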


http://git-wip-us.apache.org/repos/asf/hbase/blob/f47dba74/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BalancedQueueRpcExecutor.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BalancedQueueRpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BalancedQueueRpcExecutor.java
index 79b4ec8..e4205eb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BalancedQueueRpcExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BalancedQueueRpcExecutor.java
@@ -66,6 +66,10 @@ public class BalancedQueueRpcExecutor extends RpcExecutor {
 
   protected void initializeQueues(final int numQueues,
   final Class queueClass, Object... initargs) {
+if (initargs.length > 0) {
+  currentQueueLimit = (int) initargs[0];
+  initargs[0] = Math.max((int) initargs[0], DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT);
+}
 for (int i = 0; i < numQueues; ++i) {
   queues.add((BlockingQueue) 
ReflectionUtils.newInstance(queueClass, initargs));
 }
@@ -74,7 +78,12 @@ public class BalancedQueueRpcExecutor extends RpcExecutor {
   @Override
  public boolean dispatch(final CallRunner callTask) throws InterruptedException {
 int queueIndex = balancer.getNextQueue();
-return queues.get(queueIndex).offer(callTask);
+BlockingQueue queue = queues.get(queueIndex);
+// that means we can overflow by at most  size (5), that's ok
+if (queue.size() >= currentQueueLimit) {
+  return false;
+}
+return queue.offer(callTask);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/f47dba74/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
index 544370d..a9648b0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
@@ -139,12 +139,22 @@ public class RWQueueRpcExecutor extends RpcExecutor {
   " readQueues=" + numReadQueues + " readHandlers=" + 
readHandlersCount +
   ((numScanQueues == 0) ? "" : " scanQueues=" + numScanQueues +
 " scanHandlers=" + scanHandlersCount));
-
+if (writeQueueInitArgs.length > 0) {
+  currentQueueLimit = (int) writeQueueInitArgs[0];
+  writeQueueInitArgs[0] = Math.max((int) writeQueueInitArgs[0],
+DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT);
+}
 for (int i = 0; i < numWriteQueues; ++i) {
+
   queues.add((BlockingQueue)
 ReflectionUtils.newInstance(writeQueueClass, writeQueueInitArgs));
 }
 
+if (readQueueInitArgs.length > 0) {
+  currentQueueLimit = (int) readQueueInitArgs[0];
+  readQueueInitArgs[0] = Math.max((int) readQueueInitArgs[0],
+DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT);
+}
 for (int i = 0; i < (numReadQueues + numScanQueues); ++i) {
   queues.add((BlockingQueue)
 ReflectionUtils.newInstance(readQueueClass, readQueueInitArgs));
@@ -170,7 +180,12 @@ public class RWQueueRpcExecutor extends RpcExecutor {
 } else {
   queueIndex = numWriteQueues + readBalancer.getNextQueue();
 }
-return queues.get(queueIndex).offer(callTask);
+
+BlockingQueue queue = queues.get(queueIndex);
+if (queue.size() >= currentQueueLimit) {
+  return false;
+}
+return queue.offer(callTask);
   }
 
   private boolean isWriteRequest(final RequestHeader header, final Message 
param) {
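
A minimal sketch (not HBase code) of the pattern the two dispatch() changes above apply: allocate the backing queue at a generous hard capacity, but enforce a volatile soft limit in dispatch() so the effective call queue length can be changed at runtime without rebuilding the queues. The class name, the HARD_LIMIT value and the Runnable payload are illustrative assumptions, not the patch's actual constants.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class SoftLimitQueueSketch {
  private static final int HARD_LIMIT = 250;            // assumed hard cap, for illustration
  private final BlockingQueue<Runnable> queue;
  private volatile int currentQueueLimit;               // soft, reconfigurable limit

  public SoftLimitQueueSketch(int configuredLimit) {
    this.currentQueueLimit = configuredLimit;
    // Size the queue to the larger of the two limits, so raising the soft
    // limit later never requires a new queue instance.
    this.queue = new LinkedBlockingQueue<>(Math.max(configuredLimit, HARD_LIMIT));
  }

  // Called when the configuration is reloaded with a new call queue length.
  public void resizeQueue(int newLimit) {
    this.currentQueueLimit = newLimit;
  }

  // Reject new work once the soft limit is reached, even though the queue
  // still has spare hard capacity; this mirrors the size() check added above.
  public boolean dispatch(Runnable callTask) {
    if (queue.size() >= currentQueueLimit) {
      return false;
    }
    return queue.offer(callTask);
  }
}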


[37/37] hbase git commit: HBASE-15351 Fix description of hbase.bucketcache.size in hbase-default.xml

2016-02-26 Thread syuanjiang
HBASE-15351 Fix description of hbase.bucketcache.size in hbase-default.xml


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8f6e2978
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8f6e2978
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8f6e2978

Branch: refs/heads/hbase-12439
Commit: 8f6e29785abff6b3205d7058d9b88c1cf27faa29
Parents: 3c660e2
Author: stack 
Authored: Fri Feb 26 13:49:26 2016 -0800
Committer: stack 
Committed: Fri Feb 26 13:49:26 2016 -0800

--
 .../src/main/resources/hbase-default.xml| 28 
 1 file changed, 17 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8f6e2978/hbase-common/src/main/resources/hbase-default.xml
--
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index e446a24..e50e89e 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -857,29 +857,35 @@ possible configurations would overwhelm and obscure the 
important.
 hbase.bucketcache.ioengine
 
 Where to store the contents of the bucketcache. One of: heap,
-  offheap, or file. If a file, set it to file:PATH_TO_FILE. See
-  http://hbase.apache.org/book.html#offheap.blockcache for more 
information.
+offheap, or file. If a file, set it to file:PATH_TO_FILE. See
+http://hbase.apache.org/book.html#offheap.blockcache for more information.
 
   
   
 hbase.bucketcache.combinedcache.enabled
 true
 Whether or not the bucketcache is used in league with the LRU
-  on-heap block cache. In this mode, indices and blooms are kept in the LRU
-  blockcache and the data blocks are kept in the bucketcache.
+on-heap block cache. In this mode, indices and blooms are kept in the LRU
+blockcache and the data blocks are kept in the bucketcache.
   
   
 hbase.bucketcache.size
-65536
-The size of the buckets for the bucketcache if you only use a 
single size.
-  Defaults to the default blocksize, which is 64 * 1024.
+
+A float that EITHER represents a percentage of total heap 
memory
+size to give to the cache (if < 1.0) OR, it is the total capacity in
+megabytes of BucketCache. Default: 0.0
   
   
 hbase.bucketcache.sizes
 
-A comma-separated list of sizes for buckets for the 
bucketcache
-  if you use multiple sizes. Should be a list of block sizes in order from 
smallest
-  to largest. The sizes you use will depend on your data access 
patterns.
+A comma-separated list of sizes for buckets for the 
bucketcache.
+Can be multiple sizes. List block sizes in order from smallest to largest.
+The sizes you use will depend on your data access patterns.
+Must be a multiple of 1024 else you will run into
+'java.io.IOException: Invalid HFile block magic' when you go to read from 
cache.
+If you specify no values here, then you pick up the default bucketsizes set
+in code (See BucketAllocator#DEFAULT_BUCKET_SIZES). 
+  
   
   
   hfile.format.version
@@ -907,7 +913,7 @@ possible configurations would overwhelm and obscure the 
important.
   hbase.rs.cacheblocksonwrite
   false
   Whether an HFile block should be added to the block cache 
when the
-  block is finished.
+block is finished.
   
   
 hbase.rpc.timeout
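
The two readings of hbase.bucketcache.size described above are easy to mix up, so here is a small illustration using Hadoop's Configuration. The property keys come from the descriptions in this diff; the numbers and the interpretation helper are examples only, not HBase's actual CacheConfig code.

import org.apache.hadoop.conf.Configuration;

public class BucketCacheSizeExample {
  // Paraphrase of the description above: below 1.0 the value is a fraction of
  // the maximum heap, otherwise it is an absolute capacity in megabytes.
  static long capacityMB(float configured, long maxHeapMB) {
    return configured < 1.0f ? (long) (configured * maxHeapMB) : (long) configured;
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("hbase.bucketcache.ioengine", "offheap");

    conf.setFloat("hbase.bucketcache.size", 0.4f);      // 40% of the maximum heap
    // conf.setFloat("hbase.bucketcache.size", 4096f);  // or: 4096 MB outright

    long maxHeapMB = Runtime.getRuntime().maxMemory() / (1024 * 1024);
    float configured = conf.getFloat("hbase.bucketcache.size", 0f);
    System.out.println("BucketCache capacity: " + capacityMB(configured, maxHeapMB) + " MB");
  }
}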



[26/37] hbase git commit: HBASE-15128 Disable region splits and merges switch in master

2016-02-26 Thread syuanjiang
HBASE-15128 Disable region splits and merges switch in master


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/24d481c5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/24d481c5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/24d481c5

Branch: refs/heads/hbase-12439
Commit: 24d481c5803e69a6190339cd8bb218b2c4585459
Parents: 75c57a0
Author: chenheng 
Authored: Fri Feb 26 08:11:16 2016 +0800
Committer: chenheng 
Committed: Fri Feb 26 08:11:16 2016 +0800

--
 .../org/apache/hadoop/hbase/client/Admin.java   |   27 +-
 .../hbase/client/ConnectionImplementation.java  |   14 +
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |   31 +
 .../hadoop/hbase/protobuf/RequestConverter.java |   49 +
 .../hbase/zookeeper/ZooKeeperWatcher.java   |   10 +
 .../hbase/protobuf/generated/MasterProtos.java  | 4304 ++
 .../protobuf/generated/SnapshotProtos.java  |  500 +-
 .../protobuf/generated/ZooKeeperProtos.java |  462 +-
 hbase-protocol/src/main/protobuf/Master.proto   |   36 +
 .../src/main/protobuf/ZooKeeper.proto   |7 +
 .../hadoop/hbase/master/AssignmentManager.java  |   10 +
 .../org/apache/hadoop/hbase/master/HMaster.java |   28 +
 .../hadoop/hbase/master/MasterRpcServices.java  |   42 +
 .../org/apache/hadoop/hbase/util/HBaseFsck.java |   35 +
 .../zookeeper/SplitOrMergeTrackerManager.java   |  151 +
 .../hbase/client/TestSplitOrMergeStatus.java|  198 +
 hbase-shell/src/main/ruby/hbase/admin.rb|   32 +
 hbase-shell/src/main/ruby/shell.rb  |2 +
 .../ruby/shell/commands/splitormerge_enabled.rb |   41 +
 .../ruby/shell/commands/splitormerge_switch.rb  |   43 +
 20 files changed, 4822 insertions(+), 1200 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/24d481c5/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index d7b52d5..c3b524b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -1678,11 +1678,28 @@ public interface Admin extends Abortable, Closeable {
   List getSecurityCapabilities() throws IOException;
 
   /**
+   * Turn the Split or Merge switches on or off.
+   *
+   * @param enabled enabled or not
+   * @param synchronous If true, it waits until current split() call, if 
outstanding, to return.
+   * @param switchTypes switchType list {@link MasterSwitchType}
+   * @return Previous switch value array
+   */
+  boolean[] setSplitOrMergeEnabled(final boolean enabled, final boolean 
synchronous,
+   final MasterSwitchType... switchTypes) 
throws IOException;
+
+  /**
+   * Query the current state of the switch
+   *
+   * @return true if the switch is enabled, false otherwise.
+   */
+  boolean isSplitOrMergeEnabled(final MasterSwitchType switchType) throws 
IOException;
+
+  /**
* Currently, there are only two compact types:
* {@code NORMAL} means do store files compaction;
* {@code MOB} means do mob files compaction.
* */
-
   @InterfaceAudience.Public
   @InterfaceStability.Unstable
   public enum CompactType {
@@ -1692,4 +1709,12 @@ public interface Admin extends Abortable, Closeable {
 
 CompactType(int value) {}
   }
+  
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
+  public enum MasterSwitchType {
+SPLIT,
+MERGE
+  }
+
 }
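
For reference, this is how a client would exercise the two methods declared above, using exactly the signatures shown in this patch (note the commit is reverted a few messages below in this same batch, so treat it as a sketch against the patch rather than a released API). The connection handling is ordinary client boilerplate and the switch values are arbitrary.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Admin.MasterSwitchType;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class SplitOrMergeSwitchExample {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin()) {
      // Disable both switches for a maintenance window; keep the old values.
      boolean[] previous = admin.setSplitOrMergeEnabled(false, false,
          MasterSwitchType.SPLIT, MasterSwitchType.MERGE);
      System.out.println("split was " + previous[0] + ", merge was " + previous[1]);

      // ... bulk load, region moves, etc. ...

      // Restore and verify.
      admin.setSplitOrMergeEnabled(true, false, MasterSwitchType.SPLIT, MasterSwitchType.MERGE);
      System.out.println("split enabled: " + admin.isSplitOrMergeEnabled(MasterSwitchType.SPLIT));
    }
  }
}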

http://git-wip-us.apache.org/repos/asf/hbase/blob/24d481c5/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index dfa9937..64eb9fb 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -1742,6 +1742,20 @@ class ConnectionImplementation implements 
ClusterConnection, Closeable {
   }
 
   @Override
+  public MasterProtos.SetSplitOrMergeEnabledResponse 
setSplitOrMergeEnabled(
+RpcController controller, MasterProtos.SetSplitOrMergeEnabledRequest 
request)
+throws ServiceException {
+return stub.setSplitOrMergeEnabled(controller, request);
+  }
+
+  @Override
+  public MasterProtos.IsSplitOrMergeEnabledResponse isSplitOrMergeEnabled(
+  

[10/37] hbase git commit: HBASE-15302 Reenable the other tests disabled by HBASE-14678

2016-02-26 Thread syuanjiang
http://git-wip-us.apache.org/repos/asf/hbase/blob/30cec72f/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
new file mode 100644
index 000..125f5a1
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
@@ -0,0 +1,514 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.procedure;
+
+import java.io.IOException;
+import java.util.concurrent.CountDownLatch;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.master.HMaster;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
+import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
+import 
org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure;
+import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
+import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
+import 
org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.CreateTableState;
+import 
org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteTableState;
+import 
org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DisableTableState;
+import 
org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.EnableTableState;
+import 
org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.TruncateTableState;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.util.ModifyRegionUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+@Category({MasterTests.class, LargeTests.class})
+public class TestMasterFailoverWithProcedures {
+  private static final Log LOG = 
LogFactory.getLog(TestMasterFailoverWithProcedures.class);
+
+  protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+
+  private static void setupConf(Configuration conf) {
+// don't waste time retrying with the roll, the test is already slow 
enough.
+conf.setInt("hbase.procedure.store.wal.max.retries.before.roll", 1);
+conf.setInt("hbase.procedure.store.wal.wait.before.roll", 0);
+conf.setInt("hbase.procedure.store.wal.max.roll.retries", 1);
+conf.setInt("hbase.procedure.store.wal.sync.failure.roll.max", 1);
+  }
+
+  @Before
+  public void setup() throws Exception {
+setupConf(UTIL.getConfiguration());
+UTIL.startMiniCluster(2, 1);
+
+final ProcedureExecutor procExec = 
getMasterProcedureExecutor();
+ProcedureTestingUtility.setToggleKillBeforeStoreUpdate(procExec, false);
+ProcedureTestingUtility.setKillBeforeStoreUpdate(procExec, false);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+try {
+  UTIL.shutdownMiniCluster();
+} catch (Exception e) {
+  LOG.warn("failure shutting down cluster", e);
+}
+  }
+
+  @Test(timeout=6)
+  public void testWalRecoverLease() throws Exception {
+final ProcedureStore masterStore = getMasterProcedureExecutor().getStore();
+assertTrue("expected WALStore for this 

[12/37] hbase git commit: HBASE-15302 Reenable the other tests disabled by HBASE-14678

2016-02-26 Thread syuanjiang
HBASE-15302 Reenable the other tests disabled by HBASE-14678

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/30cec72f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/30cec72f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/30cec72f

Branch: refs/heads/hbase-12439
Commit: 30cec72f9ade972d7e9ce4bba527b0e6074cae60
Parents: 876a6ab
Author: Phil Yang 
Authored: Mon Feb 22 14:17:24 2016 +0800
Committer: stack 
Committed: Wed Feb 24 07:14:01 2016 -0800

--
 .../apache/hadoop/hbase/wal/WALSplitter.java|   11 +-
 .../hbase/TestPartialResultsFromClientSide.java |  832 
 .../TestMobSnapshotCloneIndependence.java   |   69 +
 .../client/TestSnapshotCloneIndependence.java   |  481 +
 .../master/TestDistributedLogSplitting.java | 1799 ++
 .../balancer/TestStochasticLoadBalancer2.java   |   90 +
 .../TestMasterFailoverWithProcedures.java   |  514 +
 .../TestMobFlushSnapshotFromClient.java |   72 +
 .../apache/hadoop/hbase/wal/TestWALSplit.java   | 1320 +
 .../hbase/wal/TestWALSplitCompressed.java   |   36 +
 .../hbase/client/TestReplicationShell.java  |   37 +
 11 files changed, 5256 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/30cec72f/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
index 54b82b2..010fd37 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
@@ -515,13 +515,14 @@ public class WALSplitter {
* @param fs
* @param logEntry
* @param rootDir HBase root dir.
-   * @param fileBeingSplit the file being split currently. Used to generate 
tmp file name.
+   * @param fileNameBeingSplit the file being split currently. Used to 
generate tmp file name.
* @return Path to file into which to dump split log edits.
* @throws IOException
*/
   @SuppressWarnings("deprecation")
-  private static Path getRegionSplitEditsPath(final FileSystem fs,
-  final Entry logEntry, final Path rootDir, FileStatus fileBeingSplit)
+  @VisibleForTesting
+  static Path getRegionSplitEditsPath(final FileSystem fs,
+  final Entry logEntry, final Path rootDir, String fileNameBeingSplit)
   throws IOException {
 Path tableDir = FSUtils.getTableDir(rootDir, 
logEntry.getKey().getTablename());
 String encodedRegionName = 
Bytes.toString(logEntry.getKey().getEncodedRegionName());
@@ -556,7 +557,7 @@ public class WALSplitter {
 // Append file name ends with RECOVERED_LOG_TMPFILE_SUFFIX to ensure
 // region's replayRecoveredEdits will not delete it
 String fileName = 
formatRecoveredEditsFileName(logEntry.getKey().getSequenceId());
-fileName = getTmpRecoveredEditsFileName(fileName + "-" + 
fileBeingSplit.getPath().getName());
+fileName = getTmpRecoveredEditsFileName(fileName + "-" + 
fileNameBeingSplit);
 return new Path(dir, fileName);
   }
 
@@ -1518,7 +1519,7 @@ public class WALSplitter {
  * @return a path with a write for that path. caller should close.
  */
 private WriterAndPath createWAP(byte[] region, Entry entry, Path rootdir) 
throws IOException {
-  Path regionedits = getRegionSplitEditsPath(fs, entry, rootdir, 
fileBeingSplit);
+  Path regionedits = getRegionSplitEditsPath(fs, entry, rootdir, 
fileBeingSplit.getPath().getName());
   if (regionedits == null) {
 return null;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/30cec72f/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
new file mode 100644
index 000..a6f8373
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
@@ -0,0 +1,832 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the 

[09/37] hbase git commit: HBASE-15016 Services a Store needs from a Region

2016-02-26 Thread syuanjiang
HBASE-15016 Services a Store needs from a Region

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/876a6ab7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/876a6ab7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/876a6ab7

Branch: refs/heads/hbase-12439
Commit: 876a6ab73ecff71b9b4010a532272474ea241daf
Parents: 28cd48b
Author: eshcar 
Authored: Wed Feb 24 09:56:25 2016 +0200
Committer: stack 
Committed: Wed Feb 24 07:07:07 2016 -0800

--
 .../org/apache/hadoop/hbase/util/ClassSize.java |  4 +
 .../hbase/regionserver/DefaultMemStore.java |  4 +
 .../hadoop/hbase/regionserver/HMobStore.java|  3 +
 .../hadoop/hbase/regionserver/HRegion.java  | 93 +++-
 .../hadoop/hbase/regionserver/HStore.java   |  4 +
 .../hadoop/hbase/regionserver/MemStore.java |  7 ++
 .../hadoop/hbase/regionserver/Region.java   | 12 +--
 .../regionserver/RegionServicesForStores.java   | 53 +++
 .../apache/hadoop/hbase/regionserver/Store.java |  8 ++
 .../org/apache/hadoop/hbase/TestIOFencing.java  | 10 ++-
 10 files changed, 165 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/876a6ab7/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index 77acf9b..fdd0fae 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -110,6 +110,8 @@ public class ClassSize {
   /** Overhead for CellSkipListSet */
   public static final int CELL_SKIPLIST_SET;
 
+  public static final int STORE_SERVICES;
+
   /* Are we running on jdk7? */
   private static final boolean JDK7;
   static {
@@ -193,6 +195,8 @@ public class ClassSize {
 TIMERANGE_TRACKER = align(ClassSize.OBJECT + Bytes.SIZEOF_LONG * 2);
 
 CELL_SKIPLIST_SET = align(OBJECT + REFERENCE);
+
+STORE_SERVICES = align(OBJECT + REFERENCE + ATOMIC_LONG);
   }
 
   /**
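
As a short aside on how a constant like the new STORE_SERVICES above is meant to be consumed: the region server folds these aligned fixed overheads into its heap-size estimates. The sketch below only shows the arithmetic; the enclosing DEEP_OVERHEAD sum is hypothetical, not the real accounting.

import org.apache.hadoop.hbase.util.ClassSize;

public class StoreServicesSizeSketch {
  public static void main(String[] args) {
    // Same expression as the static initializer above: object header, one
    // reference, one AtomicLong, rounded up to the JVM alignment.
    int storeServices = ClassSize.align(ClassSize.OBJECT + ClassSize.REFERENCE + ClassSize.ATOMIC_LONG);
    long deepOverhead = ClassSize.OBJECT + storeServices;  // hypothetical enclosing object
    System.out.println("STORE_SERVICES ~= " + storeServices + " bytes, total ~= " + deepOverhead);
  }
}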

http://git-wip-us.apache.org/repos/asf/hbase/blob/876a6ab7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
index 82d40b6..92bb7b6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
@@ -162,6 +162,10 @@ public class DefaultMemStore extends AbstractMemStore {
 return;
   }
 
+  @Override
+  public void finalizeFlush() {
+  }
+
   /**
* Code to help figure if our approximation of object heap sizes is close
* enough.  See hbase-900.  Fills memstores then waits so user can heap

http://git-wip-us.apache.org/repos/asf/hbase/blob/876a6ab7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
index d666db5..7b44338 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
@@ -511,6 +511,9 @@ public class HMobStore extends HStore {
 }
   }
 
+  @Override public void finalizeFlush() {
+  }
+
   public void updateCellsCountCompactedToMob(long count) {
 cellsCountCompactedToMob += count;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/876a6ab7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 0d5a71e..b70a4c3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -17,6 +17,20 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;

[05/37] hbase git commit: HBASE-15291 FileSystem not closed in secure bulkLoad (Yong Zhang)

2016-02-26 Thread syuanjiang
HBASE-15291 FileSystem not closed in secure bulkLoad (Yong Zhang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/58283fa1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/58283fa1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/58283fa1

Branch: refs/heads/hbase-12439
Commit: 58283fa1b1b10beec62cefa40babff6a1424b06c
Parents: 3ba1a7f
Author: tedyu 
Authored: Tue Feb 23 06:11:39 2016 -0800
Committer: tedyu 
Committed: Tue Feb 23 06:11:39 2016 -0800

--
 .../hbase/security/access/SecureBulkLoadEndpoint.java | 10 ++
 1 file changed, 10 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/58283fa1/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
index 06a2298..710ab44 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
@@ -292,6 +292,16 @@ public class SecureBulkLoadEndpoint extends 
SecureBulkLoadService
 new SecureBulkLoadListener(fs, bulkToken, conf));
   } catch (Exception e) {
 LOG.error("Failed to complete bulk load", e);
+  } finally {
+if (fs != null) {
+  try {
+if(!UserGroupInformation.getCurrentUser().equals(ugi)) {
+  FileSystem.closeAllForUGI(ugi);
+}
+  } catch (IOException e) {
+LOG.error("Failed to close FileSystem for " + 
ugi.getUserName(), e);
+  }
+}
   }
   return false;
 }
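
The fix comes down to one rule: anything opened through FileSystem.get() while running as the caller's UGI is cached under that UGI and must be released with FileSystem.closeAllForUGI once the bulk load finishes, otherwise the region server slowly leaks FileSystem objects. A hedged sketch of that pattern follows; the class and method names are illustrative, not the endpoint's real ones.

import java.io.IOException;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiFileSystemCleanup {
  public static void runAs(UserGroupInformation ugi, Configuration conf, Path path)
      throws IOException, InterruptedException {
    try {
      ugi.doAs((PrivilegedExceptionAction<Void>) () -> {
        FileSystem fs = FileSystem.get(conf);   // cached per (scheme, authority, ugi)
        fs.exists(path);                        // stand-in for the real bulk-load work
        return null;
      });
    } finally {
      // Only close the cache for the proxy user, never for the server's own user.
      if (!UserGroupInformation.getCurrentUser().equals(ugi)) {
        FileSystem.closeAllForUGI(ugi);
      }
    }
  }
}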



[23/37] hbase git commit: HBASE-15128 Disable region splits and merges switch in master

2016-02-26 Thread syuanjiang
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d481c5/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
index 4371739..0240a67 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
@@ -8196,6 +8196,450 @@ public final class ZooKeeperProtos {
 // @@protoc_insertion_point(class_scope:hbase.pb.TableLock)
   }
 
+  public interface SwitchStateOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
+
+// optional bool enabled = 1;
+/**
+ * optional bool enabled = 1;
+ */
+boolean hasEnabled();
+/**
+ * optional bool enabled = 1;
+ */
+boolean getEnabled();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.SwitchState}
+   *
+   * 
+   **
+   * State of the switch.
+   * 
+   */
+  public static final class SwitchState extends
+  com.google.protobuf.GeneratedMessage
+  implements SwitchStateOrBuilder {
+// Use SwitchState.newBuilder() to construct.
+private SwitchState(com.google.protobuf.GeneratedMessage.Builder 
builder) {
+  super(builder);
+  this.unknownFields = builder.getUnknownFields();
+}
+private SwitchState(boolean noInit) { this.unknownFields = 
com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final SwitchState defaultInstance;
+public static SwitchState getDefaultInstance() {
+  return defaultInstance;
+}
+
+public SwitchState getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private SwitchState(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  int mutable_bitField0_ = 0;
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+case 8: {
+  bitField0_ |= 0x0001;
+  enabled_ = input.readBool();
+  break;
+}
+  }
+}
+  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+throw e.setUnfinishedMessage(this);
+  } catch (java.io.IOException e) {
+throw new com.google.protobuf.InvalidProtocolBufferException(
+e.getMessage()).setUnfinishedMessage(this);
+  } finally {
+this.unknownFields = unknownFields.build();
+makeExtensionsImmutable();
+  }
+}
+public static final com.google.protobuf.Descriptors.Descriptor
+getDescriptor() {
+  return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_descriptor;
+}
+
+protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+internalGetFieldAccessorTable() {
+  return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_fieldAccessorTable
+  .ensureFieldAccessorsInitialized(
+  
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.class, 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.Builder.class);
+}
+
+public static com.google.protobuf.Parser PARSER =
+new com.google.protobuf.AbstractParser() {
+  public SwitchState parsePartialFrom(
+  com.google.protobuf.CodedInputStream input,
+  com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+  throws com.google.protobuf.InvalidProtocolBufferException {
+return new SwitchState(input, extensionRegistry);
+  }
+};
+
+@java.lang.Override
+public com.google.protobuf.Parser getParserForType() {
+  return PARSER;
+}
+
+private int bitField0_;
+// optional bool enabled = 1;
+public static final int ENABLED_FIELD_NUMBER = 1;
+private boolean enabled_;
+/**
+ * optional bool 

[20/37] hbase git commit: HBASE-15222 Addendum - Use less contended classes for metrics

2016-02-26 Thread syuanjiang
HBASE-15222 Addendum - Use less contended classes for metrics


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/77133fd2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/77133fd2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/77133fd2

Branch: refs/heads/hbase-12439
Commit: 77133fd225df9f65be87ce97b38676d2bab48a71
Parents: 43f99de
Author: Elliott Clark 
Authored: Thu Feb 25 09:08:11 2016 -0800
Committer: Elliott Clark 
Committed: Thu Feb 25 09:08:11 2016 -0800

--
 .../org/apache/hadoop/hbase/util/FastLongHistogram.java   | 10 +++---
 .../org/apache/hadoop/metrics2/lib/MutableHistogram.java  |  4 +++-
 .../apache/hadoop/metrics2/lib/MutableRangeHistogram.java |  6 --
 3 files changed, 10 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/77133fd2/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
index 78b2bf0..9b403d9 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
@@ -310,12 +310,8 @@ public class FastLongHistogram {
* Resets the histogram for new counting.
*/
   public FastLongHistogram reset() {
-if (this.bins.hasData.get()) {
-  Bins oldBins = this.bins;
-  this.bins = new Bins(this.bins, this.bins.counts.length - 3, 0.01, 0.99);
-  return new FastLongHistogram(oldBins);
-}
-
-return null;
+Bins oldBins = this.bins;
+this.bins = new Bins(this.bins, this.bins.counts.length - 3, 0.01, 0.99);
+return new FastLongHistogram(oldBins);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/77133fd2/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableHistogram.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableHistogram.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableHistogram.java
index 717e0ee..5b4a294 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableHistogram.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableHistogram.java
@@ -63,7 +63,9 @@ public class MutableHistogram extends MutableMetric 
implements MetricHistogram {
   public synchronized void snapshot(MetricsRecordBuilder metricsRecordBuilder, 
boolean all) {
 // Get a reference to the old histogram.
 FastLongHistogram histo = histogram.reset();
-updateSnapshotMetrics(metricsRecordBuilder, histo);
+if (histo != null) {
+  updateSnapshotMetrics(metricsRecordBuilder, histo);
+}
   }
 
   protected void updateSnapshotMetrics(MetricsRecordBuilder 
metricsRecordBuilder,

http://git-wip-us.apache.org/repos/asf/hbase/blob/77133fd2/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableRangeHistogram.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableRangeHistogram.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableRangeHistogram.java
index ac8aee0..13187af 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableRangeHistogram.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableRangeHistogram.java
@@ -56,8 +56,10 @@ public abstract class MutableRangeHistogram extends 
MutableHistogram implements
   public synchronized void snapshot(MetricsRecordBuilder metricsRecordBuilder, 
boolean all) {
 // Get a reference to the old histogram.
 FastLongHistogram histo = histogram.reset();
-updateSnapshotMetrics(metricsRecordBuilder, histo);
-updateSnapshotRangeMetrics(metricsRecordBuilder, histo);
+if (histo != null) {
+  updateSnapshotMetrics(metricsRecordBuilder, histo);
+  updateSnapshotRangeMetrics(metricsRecordBuilder, histo);
+}
   }
 
   public void updateSnapshotRangeMetrics(MetricsRecordBuilder 
metricsRecordBuilder,
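
What the addendum protects: the snapshot path swaps in fresh bins and reports from the old ones, and the callers now guard against a null return from reset(). A rough sketch of that swap-and-report idea, with LongAdder standing in for FastLongHistogram's internals and all names illustrative:

import java.util.concurrent.atomic.LongAdder;

public class ResettingCounterSketch {
  private volatile LongAdder current = new LongAdder();

  public void add(long value) {
    current.add(value);                 // writers only contend with other writers
  }

  // Swap in a fresh accumulator and hand back the old one for reporting.
  // Updates racing the swap may land in either interval; that imprecision is
  // the price of keeping the hot path cheap.
  public LongAdder reset() {
    LongAdder old = current;
    current = new LongAdder();
    return old;
  }

  public void snapshot() {
    LongAdder old = reset();
    if (old != null) {                  // defensive, mirroring the callers in the patch
      System.out.println("interval sum = " + old.sum());
    }
  }
}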



[33/37] hbase git commit: HBASE-15205 Do not find the replication scope for every WAL#append() (Ram)

2016-02-26 Thread syuanjiang
HBASE-15205 Do not find the replication scope for every WAL#append() (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8f2bd060
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8f2bd060
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8f2bd060

Branch: refs/heads/hbase-12439
Commit: 8f2bd06019869a1738bcfd66066737cdb7802ca8
Parents: 538815d
Author: ramkrishna 
Authored: Fri Feb 26 22:30:55 2016 +0530
Committer: ramkrishna 
Committed: Fri Feb 26 22:30:55 2016 +0530

--
 .../hbase/protobuf/ReplicationProtbufUtil.java  |   2 +-
 .../hadoop/hbase/regionserver/HRegion.java  |  89 +-
 .../hadoop/hbase/regionserver/HStore.java   |   2 +-
 .../hadoop/hbase/regionserver/wal/FSHLog.java   |  13 +-
 .../hbase/regionserver/wal/FSWALEntry.java  |  10 +-
 .../hadoop/hbase/regionserver/wal/HLogKey.java  |  48 ++--
 .../regionserver/wal/WALActionsListener.java|   8 +-
 .../hadoop/hbase/regionserver/wal/WALUtil.java  |  57 +
 .../hbase/replication/ScopeWALEntryFilter.java  |   2 +-
 .../replication/regionserver/Replication.java   |  70 +++
 .../hadoop/hbase/wal/DisabledWALProvider.java   |   4 +-
 .../java/org/apache/hadoop/hbase/wal/WAL.java   |   9 +-
 .../org/apache/hadoop/hbase/wal/WALKey.java | 121 +++
 .../apache/hadoop/hbase/wal/WALSplitter.java|   2 +-
 .../org/apache/hadoop/hbase/TestIOFencing.java  |   3 +-
 .../hbase/coprocessor/TestWALObserver.java  |  48 +---
 .../hbase/mapreduce/TestHLogRecordReader.java   |   7 +-
 .../hbase/mapreduce/TestImportExport.java   |  16 +--
 .../hbase/mapreduce/TestWALRecordReader.java|  20 +--
 .../master/TestDistributedLogSplitting.java |   9 +-
 .../hadoop/hbase/regionserver/TestBulkLoad.java |  17 +--
 .../hadoop/hbase/regionserver/TestHRegion.java  |  16 +--
 .../regionserver/TestHRegionReplayEvents.java   |   6 +-
 .../regionserver/TestHRegionServerBulkLoad.java |   3 +-
 .../hbase/regionserver/TestWALLockup.java   |  10 +-
 .../hbase/regionserver/wal/TestFSHLog.java  |  57 ++---
 .../regionserver/wal/TestLogRollAbort.java  |  12 +-
 .../wal/TestLogRollingNoCluster.java|  11 +-
 .../wal/TestWALActionsListener.java |  12 +-
 .../hbase/regionserver/wal/TestWALReplay.java   |  47 ---
 .../hbase/replication/TestReplicationBase.java  |   9 ++
 .../replication/TestReplicationSmallTests.java  |  13 +-
 .../TestReplicationWALEntryFilters.java |  62 +-
 .../TestReplicationSourceManager.java   |  57 +
 .../TestReplicationWALReaderManager.java|  13 +-
 .../apache/hadoop/hbase/wal/FaultyFSLog.java|   7 +-
 .../hbase/wal/TestDefaultWALProvider.java   |  64 +++---
 .../wal/TestDefaultWALProviderWithHLogKey.java  |   7 +-
 .../apache/hadoop/hbase/wal/TestSecureWAL.java  |  11 +-
 .../apache/hadoop/hbase/wal/TestWALFactory.java |  74 
 .../hbase/wal/TestWALReaderOnSecureWAL.java |  11 +-
 .../hbase/wal/WALPerformanceEvaluation.java |  15 ++-
 42 files changed, 685 insertions(+), 389 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
index 91185af..8cb2237 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
@@ -134,7 +134,7 @@ public class ReplicationProtbufUtil {
 keyBuilder.setOrigSequenceNumber(key.getOrigLogSeqNum());
   }
   WALEdit edit = entry.getEdit();
-  NavigableMap scopes = key.getScopes();
+  NavigableMap scopes = key.getReplicationScopes();
   if (scopes != null && !scopes.isEmpty()) {
 for (Map.Entry scope: scopes.entrySet()) {
   scopeBuilder.setFamily(ByteStringer.wrap(scope.getKey()));

http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index b70a4c3..406850e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 

[25/37] hbase git commit: HBASE-15128 Disable region splits and merges switch in master

2016-02-26 Thread syuanjiang
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d481c5/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
index 043d549..073eba9 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
@@ -8,6 +8,88 @@ public final class MasterProtos {
   public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
   }
+  /**
+   * Protobuf enum {@code hbase.pb.MasterSwitchType}
+   */
+  public enum MasterSwitchType
+  implements com.google.protobuf.ProtocolMessageEnum {
+/**
+ * SPLIT = 0;
+ */
+SPLIT(0, 0),
+/**
+ * MERGE = 1;
+ */
+MERGE(1, 1),
+;
+
+/**
+ * SPLIT = 0;
+ */
+public static final int SPLIT_VALUE = 0;
+/**
+ * MERGE = 1;
+ */
+public static final int MERGE_VALUE = 1;
+
+
+public final int getNumber() { return value; }
+
+public static MasterSwitchType valueOf(int value) {
+  switch (value) {
+case 0: return SPLIT;
+case 1: return MERGE;
+default: return null;
+  }
+}
+
+public static com.google.protobuf.Internal.EnumLiteMap
+internalGetValueMap() {
+  return internalValueMap;
+}
+private static com.google.protobuf.Internal.EnumLiteMap
+internalValueMap =
+  new com.google.protobuf.Internal.EnumLiteMap() {
+public MasterSwitchType findValueByNumber(int number) {
+  return MasterSwitchType.valueOf(number);
+}
+  };
+
+public final com.google.protobuf.Descriptors.EnumValueDescriptor
+getValueDescriptor() {
+  return getDescriptor().getValues().get(index);
+}
+public final com.google.protobuf.Descriptors.EnumDescriptor
+getDescriptorForType() {
+  return getDescriptor();
+}
+public static final com.google.protobuf.Descriptors.EnumDescriptor
+getDescriptor() {
+  return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.getDescriptor().getEnumTypes().get(0);
+}
+
+private static final MasterSwitchType[] VALUES = values();
+
+public static MasterSwitchType valueOf(
+com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+  if (desc.getType() != getDescriptor()) {
+throw new java.lang.IllegalArgumentException(
+  "EnumValueDescriptor is not for this type.");
+  }
+  return VALUES[desc.getIndex()];
+}
+
+private final int index;
+private final int value;
+
+private MasterSwitchType(int index, int value) {
+  this.index = index;
+  this.value = value;
+}
+
+// @@protoc_insertion_point(enum_scope:hbase.pb.MasterSwitchType)
+  }
+
   public interface AddColumnRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
@@ -28764,28 +28846,62 @@ public final class MasterProtos {
 // @@protoc_insertion_point(class_scope:hbase.pb.IsBalancerEnabledResponse)
   }
 
-  public interface NormalizeRequestOrBuilder
+  public interface SetSplitOrMergeEnabledRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
+
+// required bool enabled = 1;
+/**
+ * required bool enabled = 1;
+ */
+boolean hasEnabled();
+/**
+ * required bool enabled = 1;
+ */
+boolean getEnabled();
+
+// optional bool synchronous = 2;
+/**
+ * optional bool synchronous = 2;
+ */
+boolean hasSynchronous();
+/**
+ * optional bool synchronous = 2;
+ */
+boolean getSynchronous();
+
+// repeated .hbase.pb.MasterSwitchType switch_types = 3;
+/**
+ * repeated .hbase.pb.MasterSwitchType switch_types = 3;
+ */
+
java.util.List
 getSwitchTypesList();
+/**
+ * repeated .hbase.pb.MasterSwitchType switch_types = 3;
+ */
+int getSwitchTypesCount();
+/**
+ * repeated .hbase.pb.MasterSwitchType switch_types = 3;
+ */
+org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType 
getSwitchTypes(int index);
   }
   /**
-   * Protobuf type {@code hbase.pb.NormalizeRequest}
+   * Protobuf type {@code hbase.pb.SetSplitOrMergeEnabledRequest}
*/
-  public static final class NormalizeRequest extends
+  public static final class SetSplitOrMergeEnabledRequest extends
   com.google.protobuf.GeneratedMessage
-  implements NormalizeRequestOrBuilder {
-// Use NormalizeRequest.newBuilder() to construct.
-private NormalizeRequest(com.google.protobuf.GeneratedMessage.Builder 
builder) {
+  implements SetSplitOrMergeEnabledRequestOrBuilder {
+// Use 

[30/37] hbase git commit: Revert "HBASE-15128 Disable region splits and merges switch in master"

2016-02-26 Thread syuanjiang
Revert "HBASE-15128 Disable region splits and merges switch in master"

This reverts commit 24d481c5803e69a6190339cd8bb218b2c4585459.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bf4fcc30
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bf4fcc30
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bf4fcc30

Branch: refs/heads/hbase-12439
Commit: bf4fcc30c62395e8db9fe52fde07c752f9e00e54
Parents: 24d481c
Author: chenheng 
Authored: Fri Feb 26 08:52:12 2016 +0800
Committer: chenheng 
Committed: Fri Feb 26 08:52:12 2016 +0800

--
 .../org/apache/hadoop/hbase/client/Admin.java   |   27 +-
 .../hbase/client/ConnectionImplementation.java  |   14 -
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |   31 -
 .../hadoop/hbase/protobuf/RequestConverter.java |   49 -
 .../hbase/zookeeper/ZooKeeperWatcher.java   |   10 -
 .../hbase/protobuf/generated/MasterProtos.java  | 4304 --
 .../protobuf/generated/SnapshotProtos.java  |  500 +-
 .../protobuf/generated/ZooKeeperProtos.java |  462 +-
 hbase-protocol/src/main/protobuf/Master.proto   |   36 -
 .../src/main/protobuf/ZooKeeper.proto   |7 -
 .../hadoop/hbase/master/AssignmentManager.java  |   10 -
 .../org/apache/hadoop/hbase/master/HMaster.java |   28 -
 .../hadoop/hbase/master/MasterRpcServices.java  |   42 -
 .../org/apache/hadoop/hbase/util/HBaseFsck.java |   35 -
 .../zookeeper/SplitOrMergeTrackerManager.java   |  151 -
 .../hbase/client/TestSplitOrMergeStatus.java|  198 -
 hbase-shell/src/main/ruby/hbase/admin.rb|   32 -
 hbase-shell/src/main/ruby/shell.rb  |2 -
 .../ruby/shell/commands/splitormerge_enabled.rb |   41 -
 .../ruby/shell/commands/splitormerge_switch.rb  |   43 -
 20 files changed, 1200 insertions(+), 4822 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bf4fcc30/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index c3b524b..d7b52d5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -1678,28 +1678,11 @@ public interface Admin extends Abortable, Closeable {
   List getSecurityCapabilities() throws IOException;
 
   /**
-   * Turn the Split or Merge switches on or off.
-   *
-   * @param enabled enabled or not
-   * @param synchronous If true, it waits until current split() call, if 
outstanding, to return.
-   * @param switchTypes switchType list {@link MasterSwitchType}
-   * @return Previous switch value array
-   */
-  boolean[] setSplitOrMergeEnabled(final boolean enabled, final boolean 
synchronous,
-   final MasterSwitchType... switchTypes) 
throws IOException;
-
-  /**
-   * Query the current state of the switch
-   *
-   * @return true if the switch is enabled, false otherwise.
-   */
-  boolean isSplitOrMergeEnabled(final MasterSwitchType switchType) throws 
IOException;
-
-  /**
* Currently, there are only two compact types:
* {@code NORMAL} means do store files compaction;
* {@code MOB} means do mob files compaction.
* */
+
   @InterfaceAudience.Public
   @InterfaceStability.Unstable
   public enum CompactType {
@@ -1709,12 +1692,4 @@ public interface Admin extends Abortable, Closeable {
 
 CompactType(int value) {}
   }
-  
-  @InterfaceAudience.Public
-  @InterfaceStability.Evolving
-  public enum MasterSwitchType {
-SPLIT,
-MERGE
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/bf4fcc30/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index 64eb9fb..dfa9937 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -1742,20 +1742,6 @@ class ConnectionImplementation implements 
ClusterConnection, Closeable {
   }
 
   @Override
-  public MasterProtos.SetSplitOrMergeEnabledResponse 
setSplitOrMergeEnabled(
-RpcController controller, MasterProtos.SetSplitOrMergeEnabledRequest 
request)
-throws ServiceException {
-return stub.setSplitOrMergeEnabled(controller, request);
-  }
-
-  @Override
-  

[32/37] hbase git commit: HBASE-15205 Do not find the replication scope for every WAL#append() (Ram)

2016-02-26 Thread syuanjiang
http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
index 567e09d..e9bb468 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
@@ -22,6 +22,8 @@ package org.apache.hadoop.hbase.regionserver;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.util.NavigableMap;
+import java.util.TreeMap;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
@@ -208,13 +210,17 @@ public class TestWALLockup {
 HTableDescriptor htd = new HTableDescriptor(TableName.META_TABLE_NAME);
 final HRegion region = initHRegion(tableName, null, null, dodgyWAL);
 byte [] bytes = Bytes.toBytes(getName());
+NavigableMap scopes = new TreeMap(
+Bytes.BYTES_COMPARATOR);
+scopes.put(COLUMN_FAMILY_BYTES, 0);
 try {
   // First get something into memstore. Make a Put and then pull the Cell 
out of it. Will
   // manage append and sync carefully in below to manufacture hang. We 
keep adding same
   // edit. WAL subsystem doesn't care.
   Put put = new Put(bytes);
   put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("1"), bytes);
-  WALKey key = new WALKey(region.getRegionInfo().getEncodedNameAsBytes(), 
htd.getTableName());
+  WALKey key = new WALKey(region.getRegionInfo().getEncodedNameAsBytes(), 
htd.getTableName(),
+  scopes);
   WALEdit edit = new WALEdit();
   CellScanner CellScanner = put.cellScanner();
   assertTrue(CellScanner.advance());
@@ -228,7 +234,7 @@ public class TestWALLockup {
   LOG.info("SET throwing of exception on append");
   dodgyWAL.throwException = true;
   // This append provokes a WAL roll request
-  dodgyWAL.append(htd, region.getRegionInfo(), key, edit, true);
+  dodgyWAL.append(region.getRegionInfo(), key, edit, true);
   boolean exception = false;
   try {
 dodgyWAL.sync();

http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
index fd6d535..c60b225 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
@@ -28,7 +28,9 @@ import java.lang.reflect.Field;
 import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.List;
+import java.util.NavigableMap;
 import java.util.Set;
+import java.util.TreeMap;
 import java.util.UUID;
 
 import org.apache.commons.lang.mutable.MutableBoolean;
@@ -152,12 +154,9 @@ public class TestFSHLog {
 }
   }
 
-  protected void addEdits(WAL log,
-  HRegionInfo hri,
-  HTableDescriptor htd,
-  int times,
-  MultiVersionConcurrencyControl mvcc)
-  throws IOException {
+  protected void addEdits(WAL log, HRegionInfo hri, HTableDescriptor htd, int 
times,
+  MultiVersionConcurrencyControl mvcc, NavigableMap 
scopes)
+  throws IOException {
 final byte[] row = Bytes.toBytes("row");
 for (int i = 0; i < times; i++) {
   long timestamp = System.currentTimeMillis();
@@ -165,8 +164,8 @@ public class TestFSHLog {
   cols.add(new KeyValue(row, row, row, timestamp, row));
   WALKey key = new WALKey(hri.getEncodedNameAsBytes(), htd.getTableName(),
   WALKey.NO_SEQUENCE_ID, timestamp, WALKey.EMPTY_UUIDS, 
HConstants.NO_NONCE,
-  HConstants.NO_NONCE, mvcc);
-  log.append(htd, hri, key, cols, true);
+  HConstants.NO_NONCE, mvcc, scopes);
+  log.append(hri, key, cols, true);
 }
 log.sync();
   }
@@ -261,11 +260,21 @@ public class TestFSHLog {
 new HRegionInfo(t2.getTableName(), HConstants.EMPTY_START_ROW, 
HConstants.EMPTY_END_ROW);
 // add edits and roll the wal
 MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
+NavigableMap scopes1 = new TreeMap(
+Bytes.BYTES_COMPARATOR);
+for(byte[] fam : t1.getFamiliesKeys()) {
+  scopes1.put(fam, 0);
+}
+NavigableMap scopes2 = new TreeMap(
+
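
The tests above all do the same dance by hand: build the family-to-scope map once and pass it into the WALKey, which is the point of the change (the scope is no longer resolved inside every append). Below is a compact sketch of building that map from a table descriptor; the mail archive stripped the generics from the quoted test code, so they are restored here on a best-effort basis, and the helper name is illustrative.

import java.util.NavigableMap;
import java.util.TreeMap;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

public class ReplicationScopeExample {
  // Build the per-family replication scope map once, up front.
  public static NavigableMap<byte[], Integer> buildScopes(HTableDescriptor htd) {
    NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    for (HColumnDescriptor family : htd.getColumnFamilies()) {
      scopes.put(family.getName(), family.getScope()); // 0 = local, 1 = global
    }
    return scopes;
  }
}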

[31/37] hbase git commit: HBASE-15215 TestBlockEvictionFromClient is flaky in jdk1.7 build (setting offheap)

2016-02-26 Thread syuanjiang
HBASE-15215 TestBlockEvictionFromClient is flaky in jdk1.7 build (setting
offheap)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/538815d8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/538815d8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/538815d8

Branch: refs/heads/hbase-12439
Commit: 538815d82a62cbcc7aaccec0a3bc4e44cb925277
Parents: bf4fcc3
Author: ramkrishna 
Authored: Fri Feb 26 11:43:00 2016 +0530
Committer: ramkrishna 
Committed: Fri Feb 26 11:43:00 2016 +0530

--
 .../apache/hadoop/hbase/client/TestBlockEvictionFromClient.java| 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/538815d8/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
index f4d668c..d3f718b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
@@ -102,7 +102,7 @@ public class TestBlockEvictionFromClient {
 // tests
 conf.setInt("hbase.regionserver.handler.count", 20);
 conf.setInt("hbase.bucketcache.size", 400);
-conf.setStrings("hbase.bucketcache.ioengine", "heap");
+conf.setStrings("hbase.bucketcache.ioengine", "offheap");
 conf.setFloat("hfile.block.cache.size", 0.2f);
 conf.setFloat("hbase.regionserver.global.memstore.size", 0.1f);
 conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 0);// do not retry



[29/37] hbase git commit: Revert "HBASE-15128 Disable region splits and merges switch in master"

2016-02-26 Thread syuanjiang
http://git-wip-us.apache.org/repos/asf/hbase/blob/bf4fcc30/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
index 073eba9..043d549 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
@@ -8,88 +8,6 @@ public final class MasterProtos {
   public static void registerAllExtensions(
   com.google.protobuf.ExtensionRegistry registry) {
   }
-  /**
-   * Protobuf enum {@code hbase.pb.MasterSwitchType}
-   */
-  public enum MasterSwitchType
-  implements com.google.protobuf.ProtocolMessageEnum {
-/**
- * SPLIT = 0;
- */
-SPLIT(0, 0),
-/**
- * MERGE = 1;
- */
-MERGE(1, 1),
-;
-
-/**
- * SPLIT = 0;
- */
-public static final int SPLIT_VALUE = 0;
-/**
- * MERGE = 1;
- */
-public static final int MERGE_VALUE = 1;
-
-
-public final int getNumber() { return value; }
-
-public static MasterSwitchType valueOf(int value) {
-  switch (value) {
-case 0: return SPLIT;
-case 1: return MERGE;
-default: return null;
-  }
-}
-
-public static com.google.protobuf.Internal.EnumLiteMap
-internalGetValueMap() {
-  return internalValueMap;
-}
-private static com.google.protobuf.Internal.EnumLiteMap
-internalValueMap =
-  new com.google.protobuf.Internal.EnumLiteMap() {
-public MasterSwitchType findValueByNumber(int number) {
-  return MasterSwitchType.valueOf(number);
-}
-  };
-
-public final com.google.protobuf.Descriptors.EnumValueDescriptor
-getValueDescriptor() {
-  return getDescriptor().getValues().get(index);
-}
-public final com.google.protobuf.Descriptors.EnumDescriptor
-getDescriptorForType() {
-  return getDescriptor();
-}
-public static final com.google.protobuf.Descriptors.EnumDescriptor
-getDescriptor() {
-  return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.getDescriptor().getEnumTypes().get(0);
-}
-
-private static final MasterSwitchType[] VALUES = values();
-
-public static MasterSwitchType valueOf(
-com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
-  if (desc.getType() != getDescriptor()) {
-throw new java.lang.IllegalArgumentException(
-  "EnumValueDescriptor is not for this type.");
-  }
-  return VALUES[desc.getIndex()];
-}
-
-private final int index;
-private final int value;
-
-private MasterSwitchType(int index, int value) {
-  this.index = index;
-  this.value = value;
-}
-
-// @@protoc_insertion_point(enum_scope:hbase.pb.MasterSwitchType)
-  }
-
   public interface AddColumnRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
@@ -28846,62 +28764,28 @@ public final class MasterProtos {
 // @@protoc_insertion_point(class_scope:hbase.pb.IsBalancerEnabledResponse)
   }
 
-  public interface SetSplitOrMergeEnabledRequestOrBuilder
+  public interface NormalizeRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
-
-// required bool enabled = 1;
-/**
- * required bool enabled = 1;
- */
-boolean hasEnabled();
-/**
- * required bool enabled = 1;
- */
-boolean getEnabled();
-
-// optional bool synchronous = 2;
-/**
- * optional bool synchronous = 2;
- */
-boolean hasSynchronous();
-/**
- * optional bool synchronous = 2;
- */
-boolean getSynchronous();
-
-// repeated .hbase.pb.MasterSwitchType switch_types = 3;
-/**
- * repeated .hbase.pb.MasterSwitchType switch_types = 3;
- */
-
java.util.List
 getSwitchTypesList();
-/**
- * repeated .hbase.pb.MasterSwitchType switch_types = 3;
- */
-int getSwitchTypesCount();
-/**
- * repeated .hbase.pb.MasterSwitchType switch_types = 3;
- */
-org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType 
getSwitchTypes(int index);
   }
   /**
-   * Protobuf type {@code hbase.pb.SetSplitOrMergeEnabledRequest}
+   * Protobuf type {@code hbase.pb.NormalizeRequest}
*/
-  public static final class SetSplitOrMergeEnabledRequest extends
+  public static final class NormalizeRequest extends
   com.google.protobuf.GeneratedMessage
-  implements SetSplitOrMergeEnabledRequestOrBuilder {
-// Use SetSplitOrMergeEnabledRequest.newBuilder() to construct.
-private 
SetSplitOrMergeEnabledRequest(com.google.protobuf.GeneratedMessage.Builder 
builder) {
+  implements NormalizeRequestOrBuilder {
+

[28/37] hbase git commit: Revert "HBASE-15128 Disable region splits and merges switch in master"

2016-02-26 Thread syuanjiang
http://git-wip-us.apache.org/repos/asf/hbase/blob/bf4fcc30/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
index 9805d50..8dbb5ad 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
@@ -11,13 +11,13 @@ public final class SnapshotProtos {
   public interface SnapshotFileInfoOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
-// required .hbase.pb.SnapshotFileInfo.Type type = 1;
+// required .SnapshotFileInfo.Type type = 1;
 /**
- * required .hbase.pb.SnapshotFileInfo.Type type = 1;
+ * required .SnapshotFileInfo.Type type = 1;
  */
 boolean hasType();
 /**
- * required .hbase.pb.SnapshotFileInfo.Type type = 1;
+ * required .SnapshotFileInfo.Type type = 1;
  */
 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type 
getType();
 
@@ -67,7 +67,7 @@ public final class SnapshotProtos {
 getWalNameBytes();
   }
   /**
-   * Protobuf type {@code hbase.pb.SnapshotFileInfo}
+   * Protobuf type {@code SnapshotFileInfo}
*/
   public static final class SnapshotFileInfo extends
   com.google.protobuf.GeneratedMessage
@@ -157,12 +157,12 @@ public final class SnapshotProtos {
 }
 public static final com.google.protobuf.Descriptors.Descriptor
 getDescriptor() {
-  return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor;
+  return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_descriptor;
 }
 
 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
 internalGetFieldAccessorTable() {
-  return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable
+  return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_fieldAccessorTable
   .ensureFieldAccessorsInitialized(
   
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.class,
 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Builder.class);
 }
@@ -183,7 +183,7 @@ public final class SnapshotProtos {
 }
 
 /**
- * Protobuf enum {@code hbase.pb.SnapshotFileInfo.Type}
+ * Protobuf enum {@code SnapshotFileInfo.Type}
  */
 public enum Type
 implements com.google.protobuf.ProtocolMessageEnum {
@@ -261,21 +261,21 @@ public final class SnapshotProtos {
 this.value = value;
   }
 
-  // @@protoc_insertion_point(enum_scope:hbase.pb.SnapshotFileInfo.Type)
+  // @@protoc_insertion_point(enum_scope:SnapshotFileInfo.Type)
 }
 
 private int bitField0_;
-// required .hbase.pb.SnapshotFileInfo.Type type = 1;
+// required .SnapshotFileInfo.Type type = 1;
 public static final int TYPE_FIELD_NUMBER = 1;
 private 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type 
type_;
 /**
- * required .hbase.pb.SnapshotFileInfo.Type type = 1;
+ * required .SnapshotFileInfo.Type type = 1;
  */
 public boolean hasType() {
   return ((bitField0_ & 0x0001) == 0x0001);
 }
 /**
- * required .hbase.pb.SnapshotFileInfo.Type type = 1;
+ * required .SnapshotFileInfo.Type type = 1;
  */
 public 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type 
getType() {
   return type_;
@@ -613,19 +613,19 @@ public final class SnapshotProtos {
   return builder;
 }
 /**
- * Protobuf type {@code hbase.pb.SnapshotFileInfo}
+ * Protobuf type {@code SnapshotFileInfo}
  */
 public static final class Builder extends
 com.google.protobuf.GeneratedMessage.Builder
implements 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfoOrBuilder
 {
   public static final com.google.protobuf.Descriptors.Descriptor
   getDescriptor() {
-return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor;
+return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_descriptor;
   }
 
   protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
   internalGetFieldAccessorTable() {
-return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable
+return 

[04/37] hbase git commit: HBASE-13259 mmap() based BucketCache IOEngine (Zee Chen & Ram)

2016-02-26 Thread syuanjiang
HBASE-13259 mmap() based BucketCache IOEngine (Zee Chen & Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3ba1a7fd
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3ba1a7fd
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3ba1a7fd

Branch: refs/heads/hbase-12439
Commit: 3ba1a7fd23f0b0ca06cf7a9a04cb45975e1c7d91
Parents: a8073c4
Author: ramkrishna 
Authored: Tue Feb 23 17:03:38 2016 +0530
Committer: ramkrishna 
Committed: Tue Feb 23 17:03:38 2016 +0530

--
 .../hadoop/hbase/util/ByteBufferAllocator.java  |  39 +
 .../hadoop/hbase/util/ByteBufferArray.java  |  16 +-
 .../hadoop/hbase/util/TestByteBufferArray.java  |  16 +-
 .../hbase/io/hfile/bucket/BucketCache.java  |  11 +-
 .../io/hfile/bucket/ByteBufferIOEngine.java |  18 +-
 .../hbase/io/hfile/bucket/FileMmapEngine.java   | 166 +++
 .../io/hfile/bucket/TestFileMmapEngine.java |  68 
 7 files changed, 320 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3ba1a7fd/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferAllocator.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferAllocator.java
 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferAllocator.java
new file mode 100644
index 000..b19a0a7
--- /dev/null
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferAllocator.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+/**
+ * Defines the way the ByteBuffers are created
+ */
+@InterfaceAudience.Private
+public interface ByteBufferAllocator {
+
+  /**
+   * Allocates a bytebuffer
+   * @param size the size of the bytebuffer
+   * @param directByteBuffer indicator to create a direct bytebuffer
+   * @return the bytebuffer that is created
+   * @throws IOException exception thrown if there is an error while creating 
the ByteBuffer
+   */
+  ByteBuffer allocate(long size, boolean directByteBuffer) throws IOException;
+}
\ No newline at end of file
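
To make the contract above concrete, a hypothetical allocator that simply defers to the JDK factory methods (not part of this commit; the int cast assumes callers stay under ByteBuffer's capacity limit):

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.util.ByteBufferAllocator;

// Hypothetical allocator sketch: picks a direct or heap buffer per the flag.
public class SimpleByteBufferAllocator implements ByteBufferAllocator {
  @Override
  public ByteBuffer allocate(long size, boolean directByteBuffer) throws IOException {
    if (size > Integer.MAX_VALUE) {
      // A single ByteBuffer cannot exceed Integer.MAX_VALUE bytes.
      throw new IOException("Requested " + size + " bytes, larger than a single ByteBuffer");
    }
    return directByteBuffer ? ByteBuffer.allocateDirect((int) size)
                            : ByteBuffer.allocate((int) size);
  }
}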

http://git-wip-us.apache.org/repos/asf/hbase/blob/3ba1a7fd/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
index 2334cf7..b09dc9a 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
@@ -39,20 +40,23 @@ import org.apache.hadoop.util.StringUtils;
 public final class ByteBufferArray {
   private static final Log LOG = LogFactory.getLog(ByteBufferArray.class);
 
-  static final int DEFAULT_BUFFER_SIZE = 4 * 1024 * 1024;
+  public static final int DEFAULT_BUFFER_SIZE = 4 * 1024 * 1024;
   private ByteBuffer buffers[];
   private Lock locks[];
   private int bufferSize;
   private int bufferCount;
-
+  private ByteBufferAllocator allocator;
   /**
* We allocate a number of byte buffers as the capacity. In order not to out
* of the array bounds for the last byte(see {@link 
ByteBufferArray#multiple}),
* we will allocate one additional buffer with capacity 0;
* @param capacity total size of the byte buffer array
* @param directByteBuffer true if we allocate direct buffer
+   * @param allocator the ByteBufferAllocator that will create 

[24/37] hbase git commit: HBASE-15128 Disable region splits and merges switch in master

2016-02-26 Thread syuanjiang
http://git-wip-us.apache.org/repos/asf/hbase/blob/24d481c5/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
index 8dbb5ad..9805d50 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
@@ -11,13 +11,13 @@ public final class SnapshotProtos {
   public interface SnapshotFileInfoOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
-// required .SnapshotFileInfo.Type type = 1;
+// required .hbase.pb.SnapshotFileInfo.Type type = 1;
 /**
- * required .SnapshotFileInfo.Type type = 1;
+ * required .hbase.pb.SnapshotFileInfo.Type type = 1;
  */
 boolean hasType();
 /**
- * required .SnapshotFileInfo.Type type = 1;
+ * required .hbase.pb.SnapshotFileInfo.Type type = 1;
  */
 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type 
getType();
 
@@ -67,7 +67,7 @@ public final class SnapshotProtos {
 getWalNameBytes();
   }
   /**
-   * Protobuf type {@code SnapshotFileInfo}
+   * Protobuf type {@code hbase.pb.SnapshotFileInfo}
*/
   public static final class SnapshotFileInfo extends
   com.google.protobuf.GeneratedMessage
@@ -157,12 +157,12 @@ public final class SnapshotProtos {
 }
 public static final com.google.protobuf.Descriptors.Descriptor
 getDescriptor() {
-  return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_descriptor;
+  return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor;
 }
 
 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
 internalGetFieldAccessorTable() {
-  return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_fieldAccessorTable
+  return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable
   .ensureFieldAccessorsInitialized(
   
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.class,
 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Builder.class);
 }
@@ -183,7 +183,7 @@ public final class SnapshotProtos {
 }
 
 /**
- * Protobuf enum {@code SnapshotFileInfo.Type}
+ * Protobuf enum {@code hbase.pb.SnapshotFileInfo.Type}
  */
 public enum Type
 implements com.google.protobuf.ProtocolMessageEnum {
@@ -261,21 +261,21 @@ public final class SnapshotProtos {
 this.value = value;
   }
 
-  // @@protoc_insertion_point(enum_scope:SnapshotFileInfo.Type)
+  // @@protoc_insertion_point(enum_scope:hbase.pb.SnapshotFileInfo.Type)
 }
 
 private int bitField0_;
-// required .SnapshotFileInfo.Type type = 1;
+// required .hbase.pb.SnapshotFileInfo.Type type = 1;
 public static final int TYPE_FIELD_NUMBER = 1;
 private 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type 
type_;
 /**
- * required .SnapshotFileInfo.Type type = 1;
+ * required .hbase.pb.SnapshotFileInfo.Type type = 1;
  */
 public boolean hasType() {
   return ((bitField0_ & 0x0001) == 0x0001);
 }
 /**
- * required .SnapshotFileInfo.Type type = 1;
+ * required .hbase.pb.SnapshotFileInfo.Type type = 1;
  */
 public 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type 
getType() {
   return type_;
@@ -613,19 +613,19 @@ public final class SnapshotProtos {
   return builder;
 }
 /**
- * Protobuf type {@code SnapshotFileInfo}
+ * Protobuf type {@code hbase.pb.SnapshotFileInfo}
  */
 public static final class Builder extends
 com.google.protobuf.GeneratedMessage.Builder
implements 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfoOrBuilder
 {
   public static final com.google.protobuf.Descriptors.Descriptor
   getDescriptor() {
-return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_descriptor;
+return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor;
   }
 
   protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
   internalGetFieldAccessorTable() {
-return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_fieldAccessorTable
+return 

[27/37] hbase git commit: Revert "HBASE-15128 Disable region splits and merges switch in master"

2016-02-26 Thread syuanjiang
http://git-wip-us.apache.org/repos/asf/hbase/blob/bf4fcc30/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
index 0240a67..4371739 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
@@ -8196,450 +8196,6 @@ public final class ZooKeeperProtos {
 // @@protoc_insertion_point(class_scope:hbase.pb.TableLock)
   }
 
-  public interface SwitchStateOrBuilder
-  extends com.google.protobuf.MessageOrBuilder {
-
-// optional bool enabled = 1;
-/**
- * optional bool enabled = 1;
- */
-boolean hasEnabled();
-/**
- * optional bool enabled = 1;
- */
-boolean getEnabled();
-  }
-  /**
-   * Protobuf type {@code hbase.pb.SwitchState}
-   *
-   * 
-   **
-   * State of the switch.
-   * 
-   */
-  public static final class SwitchState extends
-  com.google.protobuf.GeneratedMessage
-  implements SwitchStateOrBuilder {
-// Use SwitchState.newBuilder() to construct.
-private SwitchState(com.google.protobuf.GeneratedMessage.Builder 
builder) {
-  super(builder);
-  this.unknownFields = builder.getUnknownFields();
-}
-private SwitchState(boolean noInit) { this.unknownFields = 
com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-private static final SwitchState defaultInstance;
-public static SwitchState getDefaultInstance() {
-  return defaultInstance;
-}
-
-public SwitchState getDefaultInstanceForType() {
-  return defaultInstance;
-}
-
-private final com.google.protobuf.UnknownFieldSet unknownFields;
-@java.lang.Override
-public final com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
-  return this.unknownFields;
-}
-private SwitchState(
-com.google.protobuf.CodedInputStream input,
-com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-throws com.google.protobuf.InvalidProtocolBufferException {
-  initFields();
-  int mutable_bitField0_ = 0;
-  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-  com.google.protobuf.UnknownFieldSet.newBuilder();
-  try {
-boolean done = false;
-while (!done) {
-  int tag = input.readTag();
-  switch (tag) {
-case 0:
-  done = true;
-  break;
-default: {
-  if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
-done = true;
-  }
-  break;
-}
-case 8: {
-  bitField0_ |= 0x0001;
-  enabled_ = input.readBool();
-  break;
-}
-  }
-}
-  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-throw e.setUnfinishedMessage(this);
-  } catch (java.io.IOException e) {
-throw new com.google.protobuf.InvalidProtocolBufferException(
-e.getMessage()).setUnfinishedMessage(this);
-  } finally {
-this.unknownFields = unknownFields.build();
-makeExtensionsImmutable();
-  }
-}
-public static final com.google.protobuf.Descriptors.Descriptor
-getDescriptor() {
-  return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_descriptor;
-}
-
-protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-internalGetFieldAccessorTable() {
-  return 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_fieldAccessorTable
-  .ensureFieldAccessorsInitialized(
-  
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.class, 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SwitchState.Builder.class);
-}
-
-public static com.google.protobuf.Parser PARSER =
-new com.google.protobuf.AbstractParser() {
-  public SwitchState parsePartialFrom(
-  com.google.protobuf.CodedInputStream input,
-  com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-  throws com.google.protobuf.InvalidProtocolBufferException {
-return new SwitchState(input, extensionRegistry);
-  }
-};
-
-@java.lang.Override
-public com.google.protobuf.Parser getParserForType() {
-  return PARSER;
-}
-
-private int bitField0_;
-// optional bool enabled = 1;
-public static final int ENABLED_FIELD_NUMBER = 1;
-private boolean enabled_;
-/**
- * optional bool 

[03/37] hbase git commit: HBASE-15232 Handle region location cache mgmt in AsyncProcess for multi()'s

2016-02-26 Thread syuanjiang
HBASE-15232 Handle region location cache mgmt in AsyncProcess for multi()'s

Further investigation after HBASE-15221 led to the finding that
AsyncProcess should have been managing the contents of the region
location cache, appropriately clearing it when necessary (e.g. when an
RPC to a server fails because the server doesn't host that region).

For multi() RPCs, the tableName argument is null since there is no
single table that the updates are destined for. This inadvertently
caused the existing region location cache updates to fail on 1.x
branches. AsyncProcess needs to handle a null tableName and perform
the necessary cache evictions.

As such, much of the new retry logic in HTableMultiplexer is
unnecessary and is removed with this commit. Getters which were
added as part of testing were left in place since they are mostly
harmless and should have no negative impact.

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a8073c4a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a8073c4a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a8073c4a

Branch: refs/heads/hbase-12439
Commit: a8073c4a9819953a2dd13a26bb4dd9405ac8750c
Parents: 5e50112
Author: Josh Elser 
Authored: Mon Feb 8 14:25:37 2016 -0500
Committer: stack 
Committed: Mon Feb 22 22:03:14 2016 -0800

--
 .../hadoop/hbase/client/AsyncProcess.java   |  11 +-
 .../hadoop/hbase/client/HTableMultiplexer.java  |  23 ++--
 .../hbase/exceptions/ClientExceptionsUtil.java  |   6 +-
 .../client/TestHTableMultiplexerViaMocks.java   | 117 ---
 .../client/TestHTableMultiplexerFlushCache.java |  60 ++
 5 files changed, 81 insertions(+), 136 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a8073c4a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
index 4ceb89a..65c15ce 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
@@ -1360,8 +1360,15 @@ class AsyncProcess {
   errorsByServer.reportServerError(server);
   canRetry = errorsByServer.canTryMore(numAttempt);
 }
-connection.updateCachedLocations(
-tableName, region, actions.get(0).getAction().getRow(), throwable, 
server);
+if (null == tableName && 
ClientExceptionsUtil.isMetaClearingException(throwable)) {
+  // For multi-actions, we don't have a table name, but we want to 
make sure to clear the
+  // cache in case there were location-related exceptions. We don't want to clear the cache
+  // for every possible exception that comes through, however.
+  connection.clearCaches(server);
+} else {
+  connection.updateCachedLocations(
+  tableName, region, actions.get(0).getAction().getRow(), 
throwable, server);
+}
 failureCount += actions.size();
 
 for (Action action : actions) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/a8073c4a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
index 13e9b85..f1bbcb3 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
@@ -194,19 +194,6 @@ public class HTableMultiplexer {
* @return true if the request can be accepted by its corresponding buffer 
queue.
*/
   public boolean put(final TableName tableName, final Put put, int 
maxAttempts) {
-return _put(tableName, put, maxAttempts, false);
-  }
-
-  /**
-   * Internal "put" which exposes a boolean flag to control whether or not the 
region location
-   * cache should be reloaded when trying to queue the {@link Put}.
-   * @param tableName Destination table for the Put
-   * @param put The Put to send
-   * @param maxAttempts Number of attempts to retry the {@code put}
-   * @param reloadCache Should the region location cache be reloaded
-   * @return true if the request was accepted in the queue, otherwise false
-   */
-  boolean _put(final TableName tableName, final Put put, int maxAttempts, 
boolean reloadCache) {
 if (maxAttempts <= 0) {
   return 
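
(The HTableMultiplexer hunk above is cut off by the digest.) As a usage-side illustration of the put(TableName, Put, int) API kept by this change, a hypothetical caller might look like the sketch below; the queue size, table, and column names are made up:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTableMultiplexer;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical fire-and-forget caller of HTableMultiplexer.put(TableName, Put, int).
public class MultiplexerPutExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    HTableMultiplexer multiplexer = new HTableMultiplexer(conf, 1000); // per-region-server buffer queue size
    Put put = new Put(Bytes.toBytes("row-1"));
    put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    // Queue the Put with up to 3 attempts; false means the buffer queue rejected it.
    boolean queued = multiplexer.put(TableName.valueOf("demo"), put, 3);
    if (!queued) {
      System.err.println("Buffer queue full; the caller decides whether to retry or drop");
    }
  }
}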

[35/37] hbase git commit: HBASE-15332 Document how to take advantage of HDFS-6133 in HBase

2016-02-26 Thread syuanjiang
HBASE-15332 Document how to take advantage of HDFS-6133 in HBase

(cherry picked from commit e0a656ed50027a7d982f1eca7a8c0ee3cab47f92)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c5288947
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c5288947
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c5288947

Branch: refs/heads/hbase-12439
Commit: c5288947ddc4abae2f4036544a775ff81538df2f
Parents: e88d943
Author: Misty Stanley-Jones 
Authored: Thu Feb 25 13:51:26 2016 -0800
Committer: Misty Stanley-Jones 
Committed: Fri Feb 26 09:38:32 2016 -0800

--
 .../asciidoc/_chapters/troubleshooting.adoc | 22 
 1 file changed, 22 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c5288947/src/main/asciidoc/_chapters/troubleshooting.adoc
--
diff --git a/src/main/asciidoc/_chapters/troubleshooting.adoc 
b/src/main/asciidoc/_chapters/troubleshooting.adoc
index 66e56b8..8b2011d 100644
--- a/src/main/asciidoc/_chapters/troubleshooting.adoc
+++ b/src/main/asciidoc/_chapters/troubleshooting.adoc
@@ -1347,6 +1347,28 @@ Settings for HDFS retries and timeouts are important to 
HBase.::
   Defaults are current as of Hadoop 2.3.
   Check the Hadoop documentation for the most current values and 
recommendations.
 
+The HBase Balancer and HDFS Balancer are incompatible::
+  The HDFS balancer attempts to spread HDFS blocks evenly among DataNodes. 
HBase relies
+  on compactions to restore locality after a region split or failure. These 
two types
+  of balancing do not work well together.
++
+In the past, the generally accepted advice was to turn off the HDFS load 
balancer and rely
+on the HBase balancer, since the HDFS balancer would degrade locality. This 
advice
+is still valid if your HDFS version is lower than 2.7.1.
++
+link:https://issues.apache.org/jira/browse/HDFS-6133[HDFS-6133] provides the 
ability
+to exclude a given directory from the HDFS load balancer, by setting the
+`dfs.datanode.block-pinning.enabled` property to `true` in your HDFS
+configuration and running the following hdfs command:
++
+
+$ sudo -u hdfs hdfs balancer -exclude /hbase
+
++
+NOTE: HDFS-6133 is available in HDFS 2.7.0 and higher, but HBase does not 
support
+running on HDFS 2.7.0, so you must be using HDFS 2.7.1 or higher to use this 
feature
+with HBase.
+
 .Connection Timeouts
 Connection timeouts occur between the client (HBASE) and the HDFS DataNode.
 They may occur when establishing a connection, attempting to read, or 
attempting to write.



[11/37] hbase git commit: HBASE-15302 Reenable the other tests disabled by HBASE-14678

2016-02-26 Thread syuanjiang
http://git-wip-us.apache.org/repos/asf/hbase/blob/30cec72f/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
new file mode 100644
index 000..c5728cf
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
@@ -0,0 +1,1799 @@
+/**
+ *
+
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master;
+
+import static 
org.apache.hadoop.hbase.SplitLogCounters.tot_mgr_wait_for_zk_delete;
+import static 
org.apache.hadoop.hbase.SplitLogCounters.tot_wkr_final_transition_failed;
+import static org.apache.hadoop.hbase.SplitLogCounters.tot_wkr_preempt_task;
+import static org.apache.hadoop.hbase.SplitLogCounters.tot_wkr_task_acquired;
+import static org.apache.hadoop.hbase.SplitLogCounters.tot_wkr_task_done;
+import static org.apache.hadoop.hbase.SplitLogCounters.tot_wkr_task_err;
+import static org.apache.hadoop.hbase.SplitLogCounters.tot_wkr_task_resigned;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.NavigableSet;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.SplitLogCounters;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter;
+import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.hadoop.hbase.client.ConnectionUtils;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Increment;
+import org.apache.hadoop.hbase.client.NonceGenerator;
+import org.apache.hadoop.hbase.client.PerClientRandomNonceGenerator;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.coordination.BaseCoordinatedStateManager;
+import org.apache.hadoop.hbase.coordination.ZKSplitLogManagerCoordination;
+import org.apache.hadoop.hbase.exceptions.OperationConflictException;
+import org.apache.hadoop.hbase.exceptions.RegionInRecoveryException;
+import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
+import org.apache.hadoop.hbase.master.SplitLogManager.TaskBatch;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import 

[17/37] hbase git commit: HBASE-15319 clearJmxCache does not take effect actually

2016-02-26 Thread syuanjiang
HBASE-15319 clearJmxCache does not take effect actually


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a3b4575f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a3b4575f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a3b4575f

Branch: refs/heads/hbase-12439
Commit: a3b4575f700c20167a6ab0b774d8a2c9cd3916af
Parents: 630a658
Author: Elliott Clark 
Authored: Wed Feb 24 09:02:06 2016 -0800
Committer: Elliott Clark 
Committed: Wed Feb 24 16:29:05 2016 -0800

--
 .../main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a3b4575f/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
index 95734ba..8fcf623 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
@@ -52,7 +52,7 @@ public class JmxCacheBuster {
   public static void clearJmxCache() {
 //If there are more then 100 ms before the executor will run then 
everything should be merged.
 ScheduledFuture future = fut.get();
-if ((future == null || (!future.isDone() && 
future.getDelay(TimeUnit.MILLISECONDS) > 100))) {
+if ((future != null && (!future.isDone() && 
future.getDelay(TimeUnit.MILLISECONDS) > 100))) {
   // BAIL OUT
   return;
 }



[01/37] hbase git commit: HBASE-15247 InclusiveStopFilter does not respect reverse Filter property. (Amal Joshy)

2016-02-26 Thread syuanjiang
Repository: hbase
Updated Branches:
  refs/heads/hbase-12439 2966eee60 -> 8f6e29785


HBASE-15247 InclusiveStopFilter does not respect reverse Filter property. (Amal 
Joshy)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2d66cd86
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2d66cd86
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2d66cd86

Branch: refs/heads/hbase-12439
Commit: 2d66cd86d08120887efe05b65fa53bd667a5be76
Parents: 2966eee
Author: anoopsjohn 
Authored: Tue Feb 23 09:21:37 2016 +0530
Committer: anoopsjohn 
Committed: Tue Feb 23 09:21:37 2016 +0530

--
 .../hbase/filter/InclusiveStopFilter.java   |  4 +--
 .../apache/hadoop/hbase/filter/TestFilter.java  | 37 
 2 files changed, 38 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2d66cd86/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
index 3412c5c..1096f5e 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
@@ -62,9 +62,7 @@ public class InclusiveStopFilter extends FilterBase {
   public boolean filterRowKey(Cell firstRowCell) {
 // if stopRowKey is <= buffer, then true, filter row.
 int cmp = CellComparator.COMPARATOR.compareRows(firstRowCell, stopRowKey, 
0, stopRowKey.length);
-if (cmp > 0) {
-  done = true;
-}
+done = reversed ? cmp < 0 : cmp > 0;
 return done;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/2d66cd86/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index 1e89685..1dd6616 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -717,6 +717,43 @@ public class TestFilter {
   }
 
   @Test
+  public void testInclusiveStopFilterWithReverseScan() throws IOException {
+
+// Grab rows from group one
+
+// If we just use start/stop row, we get total/2 - 1 rows
+long expectedRows = (this.numRows / 2) - 1;
+long expectedKeys = this.colsPerRow;
+Scan s = new Scan(Bytes.toBytes("testRowOne-3"), 
Bytes.toBytes("testRowOne-0"));
+s.setReversed(true);
+verifyScan(s, expectedRows, expectedKeys);
+
+// Now use start row with inclusive stop filter
+expectedRows = this.numRows / 2;
+s = new Scan(Bytes.toBytes("testRowOne-3"));
+s.setReversed(true);
+s.setFilter(new InclusiveStopFilter(Bytes.toBytes("testRowOne-0")));
+verifyScan(s, expectedRows, expectedKeys);
+
+// Grab rows from group two
+
+// If we just use start/stop row, we get total/2 - 1 rows
+expectedRows = (this.numRows / 2) - 1;
+expectedKeys = this.colsPerRow;
+s = new Scan(Bytes.toBytes("testRowTwo-3"), Bytes.toBytes("testRowTwo-0"));
+s.setReversed(true);
+verifyScan(s, expectedRows, expectedKeys);
+
+// Now use start row with inclusive stop filter
+expectedRows = this.numRows / 2;
+s = new Scan(Bytes.toBytes("testRowTwo-3"));
+s.setReversed(true);
+s.setFilter(new InclusiveStopFilter(Bytes.toBytes("testRowTwo-0")));
+verifyScan(s, expectedRows, expectedKeys);
+
+  }
+
+  @Test
   public void testQualifierFilter() throws IOException {
 
 // Match two keys (one from each family) in half the rows



[08/37] hbase git commit: HBASE-15277 TestRegionMergeTransactionOnCluster.testWholesomeMerge fails with no connection to master; ADDING DEBUGGING

2016-02-26 Thread syuanjiang
HBASE-15277 TestRegionMergeTransactionOnCluster.testWholesomeMerge fails with 
no connection to master; ADDING DEBUGGING


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/28cd48b6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/28cd48b6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/28cd48b6

Branch: refs/heads/hbase-12439
Commit: 28cd48b673ca743d193874b2951bc995699e8e89
Parents: 0024865
Author: stack 
Authored: Tue Feb 23 22:43:01 2016 -0800
Committer: stack 
Committed: Tue Feb 23 22:43:01 2016 -0800

--
 .../org/apache/hadoop/hbase/master/TableStateManager.java   | 2 +-
 .../regionserver/TestRegionMergeTransactionOnCluster.java   | 9 +++--
 2 files changed, 8 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/28cd48b6/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
index 12db91e..b6befaa 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
@@ -131,7 +131,7 @@ public class TableStateManager {
   TableState.State tableState = getTableState(tableName);
   return TableState.isInStates(tableState, states);
 } catch (IOException e) {
-  LOG.error("Unable to get table " + tableName + " state, probably table 
not exists");
+  LOG.error("Unable to get table " + tableName + " state", e);
   return false;
 }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/28cd48b6/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
index a532bb7..cd4410f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
@@ -78,6 +78,7 @@ import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
 import org.junit.rules.TestRule;
 
 import com.google.common.base.Joiner;
@@ -94,6 +95,7 @@ import com.google.protobuf.ServiceException;
 public class TestRegionMergeTransactionOnCluster {
   private static final Log LOG = LogFactory
   .getLog(TestRegionMergeTransactionOnCluster.class);
+  @Rule public TestName name = new TestName();
   @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
   withLookingForStuckThread(true).build();
   private static final int NB_SERVERS = 3;
@@ -182,7 +184,6 @@ public class TestRegionMergeTransactionOnCluster {
*/
   @Test
   public void testMergeAndRestartingMaster() throws Exception {
-LOG.info("Starting testMergeAndRestartingMaster");
 final TableName tableName = 
TableName.valueOf("testMergeAndRestartingMaster");
 
 // Create table and load data.
@@ -458,11 +459,15 @@ public class TestRegionMergeTransactionOnCluster {
 }
 
 Table table = TEST_UTIL.createTable(tablename, FAMILYNAME, splitRows);
+LOG.info("Created " + table.getName());
 if (replication > 1) {
   HBaseTestingUtility.setReplicas(ADMIN, tablename, replication);
+  LOG.info("Set replication of " + replication + " on " + table.getName());
 }
 loadData(table);
+LOG.info("Loaded " + table.getName());
 verifyRowCount(table, ROWSIZE);
+LOG.info("Verified " + table.getName());
 
 // sleep here is an ugly hack to allow region transitions to finish
 long timeout = System.currentTimeMillis() + waitTime;
@@ -474,7 +479,7 @@ public class TestRegionMergeTransactionOnCluster {
 break;
   Thread.sleep(250);
 }
-
+LOG.info("Getting regions of " + table.getName());
 tableRegions = MetaTableAccessor.getTableRegionsAndLocations(
 TEST_UTIL.getConnection(), tablename);
 LOG.info("Regions after load: " + Joiner.on(',').join(tableRegions));



[16/37] hbase git commit: HBASE-15222 Use less contended classes for metrics

2016-02-26 Thread syuanjiang
HBASE-15222 Use less contended classes for metrics

Summary:
Use less contended classes for metrics.
For histograms, which were the largest culprit, we now use FastLongHistogram.
For atomic longs, where possible we now use a counter.

Test Plan: unit tests

Reviewers:

Subscribers:

Differential Revision: https://reviews.facebook.net/D54381
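
To make the contention argument concrete, a small self-contained sketch using JDK classes as stand-ins (LongAdder plays the role of the striped counter here; the commit's own Counter/MutableFastCounter classes are not shown in this excerpt):

import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;

// Many threads bumping a hot metric all CAS on the same AtomicLong cell,
// while a striped counter (LongAdder here) spreads updates across cells
// and only folds them together when the value is read.
public class CounterContentionDemo {
  private final AtomicLong contended = new AtomicLong();
  private final LongAdder striped = new LongAdder();

  public void onRequest() {
    contended.incrementAndGet(); // single hot cache line under contention
    striped.increment();         // cheap striped update
  }

  public long[] snapshot() {
    return new long[] { contended.get(), striped.sum() }; // sum() folds the stripes
  }
}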


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/630a6582
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/630a6582
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/630a6582

Branch: refs/heads/hbase-12439
Commit: 630a65825ed9a9c00f72bbfcac0588e1ab0cdd72
Parents: 20e14f4
Author: Elliott Clark 
Authored: Thu Feb 18 09:54:05 2016 -0800
Committer: Elliott Clark 
Committed: Wed Feb 24 14:34:05 2016 -0800

--
 .../hadoop/hbase/util/FastLongHistogram.java| 162 ++-
 .../hbase/util/TestFastLongHistogram.java   |  32 
 .../apache/hadoop/hbase/metrics/BaseSource.java |   8 -
 .../apache/hadoop/metrics2/MetricHistogram.java |   3 +
 hbase-hadoop2-compat/pom.xml|   8 +-
 .../hbase/ipc/MetricsHBaseServerSourceImpl.java |  50 +++---
 .../MetricsAssignmentManagerSourceImpl.java |  10 +-
 .../MetricsMasterFilesystemSourceImpl.java  |  14 +-
 .../hbase/master/MetricsMasterSourceImpl.java   |   4 +-
 .../hbase/master/MetricsSnapshotSourceImpl.java |   8 +-
 .../balancer/MetricsBalancerSourceImpl.java |   8 +-
 .../hadoop/hbase/metrics/BaseSourceImpl.java|  17 +-
 .../MetricsRegionServerSourceImpl.java  |  16 +-
 .../regionserver/MetricsRegionSourceImpl.java   |  24 +--
 .../regionserver/wal/MetricsWALSourceImpl.java  |  10 +-
 .../MetricsReplicationGlobalSourceSource.java   |  36 ++---
 .../MetricsReplicationSinkSourceImpl.java   |  16 +-
 .../MetricsReplicationSourceSourceImpl.java |  36 ++---
 .../hbase/rest/MetricsRESTSourceImpl.java   |  38 ++---
 .../thrift/MetricsThriftServerSourceImpl.java   |  13 +-
 .../metrics2/lib/DynamicMetricsRegistry.java| 103 ++--
 .../metrics2/lib/MetricMutableQuantiles.java| 154 --
 .../metrics2/lib/MetricsExecutorImpl.java   |   2 +-
 .../hadoop/metrics2/lib/MutableFastCounter.java |  60 +++
 .../hadoop/metrics2/lib/MutableHistogram.java   | 133 +--
 .../metrics2/lib/MutableRangeHistogram.java |  75 -
 .../metrics2/lib/MutableSizeHistogram.java  |  25 ++-
 .../metrics2/lib/MutableTimeHistogram.java  |  23 ++-
 .../hbase/metrics/TestBaseSourceImpl.java   |   5 +-
 .../tmpl/regionserver/BlockCacheTmpl.jamon  |   8 -
 .../tmpl/regionserver/BlockCacheViewTmpl.jamon  |   1 -
 .../tmpl/regionserver/ServerMetricsTmpl.jamon   |   1 -
 .../hadoop/hbase/io/hfile/AgeSnapshot.java  |  38 +++--
 .../hadoop/hbase/io/hfile/BlockCacheUtil.java   |  31 ++--
 .../hadoop/hbase/io/hfile/CacheStats.java   |  54 +++
 .../org/apache/hadoop/hbase/io/hfile/HFile.java |  10 +-
 .../hadoop/hbase/io/hfile/HFileBlock.java   |   2 +-
 .../hadoop/hbase/io/hfile/HFileReaderImpl.java  |   4 +-
 .../hbase/io/hfile/bucket/BucketCacheStats.java |  11 +-
 .../hbase/regionserver/StoreFileScanner.java|  14 +-
 40 files changed, 565 insertions(+), 702 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/630a6582/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
index 623cbdb..78b2bf0 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.util;
 
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicLongArray;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
@@ -31,11 +30,20 @@ import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class FastLongHistogram {
+
+  /**
+   * Default number of bins.
+   */
+  public static final int DEFAULT_NBINS = 255;
+
+  public static final double[] DEFAULT_QUANTILES =
+  new double[]{0.25, 0.5, 0.75, 0.90, 0.95, 0.98, 0.99, 0.999};
+
   /**
* Bins is a class containing a list of buckets(or bins) for estimation 
histogram of some data.
*/
   private static class Bins {
-private final AtomicLongArray counts;
+private 

[13/37] hbase git commit: HBASE-15312 Update the dependences of pom for mini cluster in HBase Book (Liu Shaohui)

2016-02-26 Thread syuanjiang
HBASE-15312 Update the dependences of pom for mini cluster in HBase Book (Liu 
Shaohui)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2a306437
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2a306437
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2a306437

Branch: refs/heads/hbase-12439
Commit: 2a306437aaccb99ff333ab41c7165333994eba48
Parents: 30cec72
Author: stack 
Authored: Wed Feb 24 09:31:10 2016 -0800
Committer: stack 
Committed: Wed Feb 24 09:31:10 2016 -0800

--
 src/main/asciidoc/_chapters/unit_testing.adoc | 60 ++
 1 file changed, 39 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2a306437/src/main/asciidoc/_chapters/unit_testing.adoc
--
diff --git a/src/main/asciidoc/_chapters/unit_testing.adoc 
b/src/main/asciidoc/_chapters/unit_testing.adoc
index e1bcf87..15b6cce 100644
--- a/src/main/asciidoc/_chapters/unit_testing.adoc
+++ b/src/main/asciidoc/_chapters/unit_testing.adoc
@@ -268,37 +268,55 @@ Check the versions to be sure they are appropriate.
 
 [source,xml]
 
+
+  2.0.0-SNAPSHOT
+  2.7.1
+
 
-
-org.apache.hadoop
-hadoop-common
-2.0.0
+
+  
+org.apache.hbase
+hbase-server
+${hbase.version}
+test
+  
+  
+org.apache.hbase
+hbase-server
+${hbase.version}
 test-jar
 test
-
-
-
+  
+  
 org.apache.hbase
-hbase
-0.98.3
+hbase-hadoop-compat
+${hbase.version}
 test-jar
 test
-
+  
 
-
+  
 org.apache.hadoop
-hadoop-hdfs
-2.0.0
+hadoop-common
+${hadoop.version}
 test-jar
 test
-
-
-
+  
+  
+org.apache.hbase
+hbase-hadoop2-compat
+${hbase.version}
+test-jar
+test
+  
+  
 org.apache.hadoop
 hadoop-hdfs
-2.0.0
+${hadoop.version}
+test-jar
 test
-
+  
+
 
 
 This code represents an integration test for the MyDAO insert shown in 
<>.
@@ -309,7 +327,8 @@ This code represents an integration test for the MyDAO 
insert shown in <

[14/37] hbase git commit: HBASE-15310 hbase-spark module has compilation failures with clover profile

2016-02-26 Thread syuanjiang
HBASE-15310 hbase-spark module has compilation failures with clover profile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/20e14f44
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/20e14f44
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/20e14f44

Branch: refs/heads/hbase-12439
Commit: 20e14f449a9d5ba052ef6250c08ee1e4c558ccf2
Parents: 2a30643
Author: Jonathan M Hsieh 
Authored: Wed Feb 24 10:09:21 2016 -0800
Committer: Jonathan M Hsieh 
Committed: Wed Feb 24 11:54:43 2016 -0800

--
 hbase-spark/pom.xml | 35 +++
 1 file changed, 35 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/20e14f44/hbase-spark/pom.xml
--
diff --git a/hbase-spark/pom.xml b/hbase-spark/pom.xml
index 7c7590e..7767440 100644
--- a/hbase-spark/pom.xml
+++ b/hbase-spark/pom.xml
@@ -604,6 +604,41 @@
 
 
 
+
+
+
+org.codehaus.mojo
+build-helper-maven-plugin
+
+
+add-source
+validate
+
+add-source
+
+
+
+src/main/scala
+
+
+
+
+add-test-source
+validate
+
+add-test-source
+
+
+
+src/test/scala
+
+
+
+
+
 
 
 



[21/37] hbase git commit: HBASE-15144 Procedure v2 - Web UI displaying Store state

2016-02-26 Thread syuanjiang
HBASE-15144 Procedure v2 - Web UI displaying Store state


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/40c55915
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/40c55915
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/40c55915

Branch: refs/heads/hbase-12439
Commit: 40c55915e7a45a639adb7f7a370a04f38058ac26
Parents: 77133fd
Author: Samir Ahmic 
Authored: Wed Feb 24 16:05:24 2016 +0100
Committer: Matteo Bertozzi 
Committed: Thu Feb 25 10:46:56 2016 -0800

--
 .../procedure2/store/wal/ProcedureWALFile.java  |  32 +++--
 .../store/wal/ProcedureWALFormat.java   |   9 +-
 .../store/wal/ProcedureWALFormatReader.java |   8 +-
 .../procedure2/store/wal/WALProcedureStore.java | 108 +
 .../org/apache/hadoop/hbase/master/HMaster.java |   8 +-
 .../hbase-webapps/master/procedures.jsp | 118 ++-
 6 files changed, 244 insertions(+), 39 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/40c55915/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
index 6493526..097cd29 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
@@ -22,12 +22,12 @@ import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker;
 import 
org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureWALHeader;
 import 
org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureWALTrailer;
@@ -42,24 +42,29 @@ public class ProcedureWALFile implements 
Comparable {
 
   private ProcedureWALHeader header;
   private FSDataInputStream stream;
-  private FileStatus logStatus;
   private FileSystem fs;
   private Path logFile;
   private long startPos;
   private long minProcId;
   private long maxProcId;
+  private long logSize;
+  private long timestamp;
 
   public ProcedureWALFile(final FileSystem fs, final FileStatus logStatus) {
 this.fs = fs;
-this.logStatus = logStatus;
 this.logFile = logStatus.getPath();
+this.logSize = logStatus.getLen();
+this.timestamp = logStatus.getModificationTime();
   }
 
-  public ProcedureWALFile(FileSystem fs, Path logFile, ProcedureWALHeader 
header, long startPos) {
+  public ProcedureWALFile(FileSystem fs, Path logFile, ProcedureWALHeader 
header,
+  long startPos, long timestamp) {
 this.fs = fs;
-this.logFile = logFile;
 this.header = header;
+this.logFile = logFile;
 this.startPos = startPos;
+this.logSize = startPos;
+this.timestamp = timestamp;
   }
 
   public void open() throws IOException {
@@ -77,7 +82,7 @@ public class ProcedureWALFile implements 
Comparable {
 
   public ProcedureWALTrailer readTrailer() throws IOException {
 try {
-  return ProcedureWALFormat.readTrailer(stream, startPos, 
logStatus.getLen());
+  return ProcedureWALFormat.readTrailer(stream, startPos, logSize);
 } finally {
   stream.seek(startPos);
 }
@@ -112,6 +117,10 @@ public class ProcedureWALFile implements 
Comparable {
 return header;
   }
 
+  public long getTimestamp() {
+return timestamp;
+  }
+
   public boolean isCompacted() {
 return header.getType() == ProcedureWALFormat.LOG_TYPE_COMPACTED;
   }
@@ -121,7 +130,14 @@ public class ProcedureWALFile implements 
Comparable {
   }
 
   public long getSize() {
-return logStatus != null ? logStatus.getLen() : 0;
+return logSize;
+  }
+
+  /**
+   * Used to update in-progress log sizes. The FileStatus will report 0 otherwise.
+   */
+  void addToSize(long size) {
+this.logSize += size;
   }
 
   public void removeFile() throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/40c55915/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java

[18/37] hbase git commit: HBASE-15264 Implement a fan out HDFS OutputStream

2016-02-26 Thread syuanjiang
HBASE-15264 Implement a fan out HDFS OutputStream


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6e9d355b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6e9d355b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6e9d355b

Branch: refs/heads/hbase-12439
Commit: 6e9d355b12a1e666f4d05be02775a01b6754d063
Parents: a3b4575
Author: zhangduo 
Authored: Wed Feb 24 20:47:38 2016 +0800
Committer: zhangduo 
Committed: Thu Feb 25 10:07:27 2016 +0800

--
 .../util/FanOutOneBlockAsyncDFSOutput.java  | 533 +++
 .../FanOutOneBlockAsyncDFSOutputHelper.java | 672 +++
 ...anOutOneBlockAsyncDFSOutputFlushHandler.java |  61 ++
 .../util/TestFanOutOneBlockAsyncDFSOutput.java  | 190 ++
 4 files changed, 1456 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9d355b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
new file mode 100644
index 000..b10f180
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
@@ -0,0 +1,533 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import static 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.HEART_BEAT_SEQNO;
+import static 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.completeFile;
+import static 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.endFileLease;
+import static 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
+import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.nio.channels.CompletionHandler;
+import java.util.ArrayDeque;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import 
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose;
+import org.apache.hadoop.hdfs.DFSClient;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
+import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
+import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
+import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
+import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
+import org.apache.hadoop.util.DataChecksum;
+
+import com.google.common.base.Supplier;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.EventLoop;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.handler.codec.protobuf.ProtobufDecoder;
+import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+import io.netty.handler.timeout.IdleState;
+import io.netty.handler.timeout.IdleStateEvent;
+import io.netty.handler.timeout.IdleStateHandler;
+import io.netty.util.concurrent.Future;
+import io.netty.util.concurrent.FutureListener;
+import io.netty.util.concurrent.Promise;
+
+/**
+ * An asynchronous HDFS output stream implementation which fans out data to 
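
The class comment above is cut off by the digest, but the commit title
("Implement a fan out HDFS OutputStream") and the imports give the shape of the
design: instead of chaining datanodes in a replication pipeline, the writer
sends each packet to every datanode directly and treats the write as complete
once all of them acknowledge. A from-scratch illustration of that
fan-out/ack-gathering pattern with CompletableFuture (Java 9+ for List.of;
names and interfaces here are invented for the sketch and are not the HBase API):

    import java.nio.ByteBuffer;
    import java.util.List;
    import java.util.concurrent.CompletableFuture;

    /** Toy fan-out writer: send the same buffer to every target, finish when all have acked. */
    public class FanOutSketch {
      interface Target {                         // stands in for one datanode connection
        CompletableFuture<Void> send(ByteBuffer data);
      }

      static CompletableFuture<Void> fanOut(List<Target> targets, ByteBuffer data) {
        CompletableFuture<?>[] acks = targets.stream()
            .map(t -> t.send(data.duplicate()))  // duplicate so each target reads independently
            .toArray(CompletableFuture<?>[]::new);
        return CompletableFuture.allOf(acks);    // completes only when every replica has acked
      }

      public static void main(String[] args) {
        Target ok = data -> CompletableFuture.completedFuture(null);
        fanOut(List.of(ok, ok, ok), ByteBuffer.wrap("hello".getBytes()))
            .thenRun(() -> System.out.println("all replicas acked"));
      }
    }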

[15/37] hbase git commit: HBASE-15222 Use less contended classes for metrics

2016-02-26 Thread syuanjiang
http://git-wip-us.apache.org/repos/asf/hbase/blob/630a6582/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
index 32d4fae..aaf4359 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MutableTimeHistogram.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.metrics2.lib;
 
-import java.util.concurrent.atomic.AtomicLongArray;
-
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsInfo;
 
@@ -28,31 +26,30 @@ import org.apache.hadoop.metrics2.MetricsInfo;
  */
 @InterfaceAudience.Private
 public class MutableTimeHistogram extends MutableRangeHistogram {
-  private final String rangeType = "TimeRangeCount";
-  private final long[] ranges =
+  private final static String RANGE_TYPE = "TimeRangeCount";
+  private final static long[] RANGES =
   { 1, 3, 10, 30, 100, 300, 1000, 3000, 1, 3, 6, 12, 
30, 60 };
-  private final AtomicLongArray rangeVals = new 
AtomicLongArray(ranges.length+1);
 
   public MutableTimeHistogram(MetricsInfo info) {
 this(info.name(), info.description());
   }
 
   public MutableTimeHistogram(String name, String description) {
-super(name, description);
+this(name, description, RANGES[RANGES.length - 2]);
+  }
+
+  public MutableTimeHistogram(String name, String description, long 
expectedMax) {
+super(name, description, expectedMax);
   }
 
   @Override
   public String getRangeType() {
-return rangeType;
+return RANGE_TYPE;
   }
 
   @Override
-  public long[] getRange() {
-return ranges;
+  public long[] getRanges() {
+return RANGES;
   }
 
-  @Override
-  public AtomicLongArray getRangeVals() {
-return rangeVals;
-  } 
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/630a6582/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
--
diff --git 
a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
 
b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
index 7381fb9..2e374f7 100644
--- 
a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
+++ 
b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/metrics/TestBaseSourceImpl.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.metrics;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.testclassification.MetricsTests;
 import org.apache.hadoop.metrics2.lib.MutableCounterLong;
+import org.apache.hadoop.metrics2.lib.MutableFastCounter;
 import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -72,9 +73,9 @@ public class TestBaseSourceImpl {
   @Test
   public void testIncCounters() throws Exception {
 bmsi.incCounters("testinccounter", 100);
-assertEquals(100, ((MutableCounterLong) 
bmsi.metricsRegistry.get("testinccounter")).value());
+assertEquals(100, ((MutableFastCounter) 
bmsi.metricsRegistry.get("testinccounter")).value());
 bmsi.incCounters("testinccounter", 100);
-assertEquals(200, ((MutableCounterLong) 
bmsi.metricsRegistry.get("testinccounter")).value());
+assertEquals(200, ((MutableFastCounter) 
bmsi.metricsRegistry.get("testinccounter")).value());
 
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/630a6582/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
index 6986f12..3dcd5e2 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
@@ -178,7 +178,6 @@ org.apache.hadoop.util.StringUtils;
   AgeSnapshot ageAtEvictionSnapshot = bc.getStats().getAgeAtEvictionSnapshot();
   // Only show if non-zero mean and stddev as is the case in combinedblockcache
   double mean = ageAtEvictionSnapshot.getMean();
-  double stddev = ageAtEvictionSnapshot.getStdDev();
 
 
 Evicted
@@ -197,13 +196,6 @@ org.apache.hadoop.util.StringUtils;
 Mean age of Blocks at eviction time (seconds)
 
 
-<%if stddev > 0 %>
-
-StdDev
-<% String.format("%,d", 

[22/37] hbase git commit: HBASE-15311 Prevent NPE in BlockCacheViewTmpl.

2016-02-26 Thread syuanjiang
HBASE-15311 Prevent NPE in BlockCacheViewTmpl.

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/75c57a04
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/75c57a04
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/75c57a04

Branch: refs/heads/hbase-12439
Commit: 75c57a04ddad2d7cf3435df1eba13541775319fb
Parents: 40c5591
Author: Samir Ahmic 
Authored: Tue Feb 23 11:34:09 2016 +0100
Committer: stack 
Committed: Thu Feb 25 15:23:28 2016 -0800

--
 .../apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmpl.jamon | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/75c57a04/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmpl.jamon
index fa55f6a..c6d7a61 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmpl.jamon
@@ -44,7 +44,7 @@ org.apache.hadoop.util.StringUtils;
   if (bcn.equals("L1")) {
 bc = bcs == null || bcs.length == 0? bc: bcs[0];
   } else {
-if (bcs.length < 2) {
+if (bcs == null || bcs.length < 2) {
   System.out.println("There is no L2 block cache");
   return;
 }



[36/37] hbase git commit: HBASE-15349 Update surefire version to 2.19.1. (Apekshit)

2016-02-26 Thread syuanjiang
HBASE-15349 Update surefire version to 2.19.1. (Apekshit)

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3c660e2a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3c660e2a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3c660e2a

Branch: refs/heads/hbase-12439
Commit: 3c660e2a0f436a52a9bbdfb7c6dd82bf67097639
Parents: c528894
Author: Apekshit 
Authored: Fri Feb 26 12:01:08 2016 -0800
Committer: stack 
Committed: Fri Feb 26 12:26:40 2016 -0800

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3c660e2a/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 82eff70..b3fa787 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1239,7 +1239,7 @@
 
hbase-procedure-${project.version}-tests.jar
 hbase-it-${project.version}-tests.jar
 
hbase-annotations-${project.version}-tests.jar
-2.18.1
+2.19.1
 surefire-junit47
 
 false



[34/37] hbase git commit: HBASE-15348 Disable metrics tests until fixed.

2016-02-26 Thread syuanjiang
HBASE-15348 Disable metrics tests until fixed.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e88d9431
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e88d9431
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e88d9431

Branch: refs/heads/hbase-12439
Commit: e88d94318321d40993953180368d33d24602a2ae
Parents: 8f2bd06
Author: Elliott Clark 
Authored: Fri Feb 26 09:04:18 2016 -0800
Committer: Elliott Clark 
Committed: Fri Feb 26 09:04:51 2016 -0800

--
 .../apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java| 2 ++
 .../hadoop/hbase/regionserver/TestRegionServerMetrics.java   | 4 +++-
 2 files changed, 5 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e88d9431/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
index 1f149bf..d4f7cdd 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
@@ -50,12 +50,14 @@ import org.apache.hadoop.net.DNSToSwitchMapping;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.FixMethodOrder;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runners.MethodSorters;
 
 @Category({ MiscTests.class, MediumTests.class })
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
+@Ignore
 public class TestStochasticBalancerJmxMetrics extends BalancerTestBase {
   private static final Log LOG = 
LogFactory.getLog(TestStochasticBalancerJmxMetrics.class);
   private static HBaseTestingUtility UTIL = new HBaseTestingUtility();

http://git-wip-us.apache.org/repos/asf/hbase/blob/e88d9431/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index 7575e7b..1ec0bf7 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -29,6 +29,7 @@ import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -513,6 +514,7 @@ public class TestRegionServerMetrics {
   }
   
   @Test
+  @Ignore
   public void testRangeCountMetrics() throws Exception {
 String tableNameString = "testRangeCountMetrics";
 final long[] timeranges =
@@ -558,7 +560,7 @@ public class TestRegionServerMetrics {
   dynamicMetricName =
   timeRangeMetricName + "_" + timeRangeType + "_" + prior + "-" + 
timeranges[i];
   if (metricsHelper.checkCounterExists(dynamicMetricName, serverSource)) {
-long count = metricsHelper.getCounter(dynamicMetricName, serverSource);
+long count = metricsHelper.getGaugeLong(dynamicMetricName, 
serverSource);
 if (count > 0) {
   timeRangeCountUpdated = true;
   break;



[02/37] hbase git commit: HBASE-15301 Remove the never-thrown NamingException from TableInputFormatBase#reverseDNS method signature (Yu Li)

2016-02-26 Thread syuanjiang
HBASE-15301 Remove the never-thrown NamingException from 
TableInputFormatBase#reverseDNS method signature (Yu Li)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5e501123
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5e501123
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5e501123

Branch: refs/heads/hbase-12439
Commit: 5e501123ca7854b18c63674029cdf3e7aeac5cf9
Parents: 2d66cd8
Author: stack 
Authored: Mon Feb 22 21:58:20 2016 -0800
Committer: stack 
Committed: Mon Feb 22 21:58:20 2016 -0800

--
 .../hadoop/hbase/mapreduce/TableInputFormatBase.java | 11 ++-
 .../hadoop/hbase/mapreduce/TestTableInputFormatBase.java |  4 +---
 2 files changed, 3 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5e501123/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index b2f115c..9b5bbfb 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -27,8 +27,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 
-import javax.naming.NamingException;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -285,12 +283,7 @@ extends InputFormat {
 }
 InetAddress regionAddress = isa.getAddress();
 String regionLocation;
-try {
-  regionLocation = reverseDNS(regionAddress);
-} catch (NamingException e) {
-  LOG.warn("Cannot resolve the host name for " + regionAddress + " 
because of " + e);
-  regionLocation = location.getHostname();
-}
+regionLocation = reverseDNS(regionAddress);
   
 byte[] startRow = scan.getStartRow();
 byte[] stopRow = scan.getStopRow();
@@ -344,7 +337,7 @@ extends InputFormat {
 }
   }
 
-  String reverseDNS(InetAddress ipAddress) throws NamingException, 
UnknownHostException {
+  String reverseDNS(InetAddress ipAddress) throws UnknownHostException {
 String hostName = this.reverseDNSCacheMap.get(ipAddress);
 if (hostName == null) {
   String ipAddressString = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/5e501123/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
index c757a2d..699e773 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
@@ -24,8 +24,6 @@ import java.net.Inet6Address;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 
-import javax.naming.NamingException;
-
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -34,7 +32,7 @@ import org.junit.experimental.categories.Category;
 public class TestTableInputFormatBase {
   @Test
   public void testTableInputFormatBaseReverseDNSForIPv6()
-  throws UnknownHostException, NamingException {
+  throws UnknownHostException {
 String address = "ipv6.google.com";
 String localhost = null;
 InetAddress addr = null;
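
The hunks above drop a checked NamingException that reverseDNS could never
actually throw, leaving only UnknownHostException on the signature. For
reference, a from-scratch sketch of the same cached reverse-lookup pattern using
only java.net (the real method goes through Hadoop's DNS helper and the
reverseDNSCacheMap shown in the diff; everything below is illustrative):

    import java.net.InetAddress;
    import java.net.UnknownHostException;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    /** Toy cached reverse-DNS lookup; note that nothing here can raise NamingException. */
    public class ReverseDnsCache {
      private final ConcurrentMap<InetAddress, String> cache = new ConcurrentHashMap<>();

      public String reverseDns(InetAddress ip) {
        // getCanonicalHostName() performs the reverse lookup itself and falls back to the
        // literal address on failure, so no checked exception is needed at all.
        return cache.computeIfAbsent(ip, a -> a.getCanonicalHostName().toLowerCase());
      }

      public static void main(String[] args) throws UnknownHostException {
        InetAddress ip = InetAddress.getByName("127.0.0.1");
        System.out.println(new ReverseDnsCache().reverseDns(ip));
      }
    }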



[19/37] hbase git commit: HBASE-15136 Explore different queuing behaviors while busy

2016-02-26 Thread syuanjiang
HBASE-15136 Explore different queuing behaviors while busy


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/43f99def
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/43f99def
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/43f99def

Branch: refs/heads/hbase-12439
Commit: 43f99def670551cfe314c44181c0cb9570cdaaa3
Parents: 6e9d355
Author: Mikhail Antonov 
Authored: Wed Feb 24 20:40:44 2016 -0800
Committer: Mikhail Antonov 
Committed: Wed Feb 24 20:41:30 2016 -0800

--
 .../hadoop/hbase/util/ReflectionUtils.java  |   1 +
 .../hbase/ipc/MetricsHBaseServerSource.java |   6 +
 .../hbase/ipc/MetricsHBaseServerWrapper.java|   2 +
 .../hbase/ipc/MetricsHBaseServerSourceImpl.java |   6 +-
 .../hbase/ipc/AdaptiveLifoCoDelCallQueue.java   | 329 +++
 .../hadoop/hbase/ipc/FifoRpcScheduler.java  |  10 +
 .../ipc/MetricsHBaseServerWrapperImpl.java  |  16 +
 .../hadoop/hbase/ipc/RWQueueRpcExecutor.java|  10 +
 .../apache/hadoop/hbase/ipc/RpcScheduler.java   |  13 +
 .../hadoop/hbase/ipc/SimpleRpcScheduler.java|  70 +++-
 .../ipc/MetricsHBaseServerWrapperStub.java  |  10 +
 .../hbase/ipc/TestSimpleRpcScheduler.java   |  63 
 12 files changed, 534 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/43f99def/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
index 650c544..15b3930 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
@@ -85,6 +85,7 @@ public class ReflectionUtils {
 match = (!ctorParamTypes[i].isPrimitive()) ? 
ctorParamTypes[i].isAssignableFrom(paramType) :
   ((int.class.equals(ctorParamTypes[i]) && 
Integer.class.equals(paramType)) ||
(long.class.equals(ctorParamTypes[i]) && 
Long.class.equals(paramType)) ||
+   (double.class.equals(ctorParamTypes[i]) && 
Double.class.equals(paramType)) ||
(char.class.equals(ctorParamTypes[i]) && 
Character.class.equals(paramType)) ||
(short.class.equals(ctorParamTypes[i]) && 
Short.class.equals(paramType)) ||
(boolean.class.equals(ctorParamTypes[i]) && 
Boolean.class.equals(paramType)) ||
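
The ReflectionUtils hunk above adds double/Double to the boxed-primitive pairs
accepted when matching candidate constructor parameters. A compact,
self-contained restatement of that matching rule (written from scratch for
illustration; requires Java 9+ for Map.of and is not the HBase method itself):

    import java.util.Map;

    public class PrimitiveMatch {
      private static final Map<Class<?>, Class<?>> BOXED = Map.of(
          int.class, Integer.class,
          long.class, Long.class,
          double.class, Double.class,   // the pair added by HBASE-15136
          char.class, Character.class,
          short.class, Short.class,
          boolean.class, Boolean.class,
          float.class, Float.class,
          byte.class, Byte.class);

      /** True if an argument of type paramType can satisfy a constructor parameter of ctorType. */
      static boolean matches(Class<?> ctorType, Class<?> paramType) {
        return ctorType.isPrimitive()
            ? paramType.equals(BOXED.get(ctorType))
            : ctorType.isAssignableFrom(paramType);
      }

      public static void main(String[] args) {
        System.out.println(matches(double.class, Double.class));  // true
        System.out.println(matches(double.class, Float.class));   // false
        System.out.println(matches(Number.class, Integer.class)); // true (assignable)
      }
    }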

http://git-wip-us.apache.org/repos/asf/hbase/blob/43f99def/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
index 061a672..bb89789 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
@@ -64,6 +64,12 @@ public interface MetricsHBaseServerSource extends BaseSource 
{
   String NUM_OPEN_CONNECTIONS_DESC = "Number of open connections.";
   String NUM_ACTIVE_HANDLER_NAME = "numActiveHandler";
   String NUM_ACTIVE_HANDLER_DESC = "Number of active rpc handlers.";
+  String NUM_GENERAL_CALLS_DROPPED_NAME = "numGeneralCallsDropped";
+  String NUM_GENERAL_CALLS_DROPPED_DESC = "Total number of calls in general 
queue which " +
+"were dropped by CoDel RPC executor";
+  String NUM_LIFO_MODE_SWITCHES_NAME = "numLifoModeSwitches";
+  String NUM_LIFO_MODE_SWITCHES_DESC = "Total number of calls in general queue 
which " +
+"were served from the tail of the queue";
 
   String EXCEPTIONS_NAME="exceptions";
   String EXCEPTIONS_DESC="Exceptions caused by requests";
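
The two new metric names above (numGeneralCallsDropped, numLifoModeSwitches)
come from the AdaptiveLifoCoDelCallQueue this patch introduces: under load the
executor serves calls from the tail of the queue (LIFO) and, CoDel-style, sheds
calls that have already waited too long. A heavily simplified, from-scratch
sketch of that decision logic (field names and thresholds are mine; this is not
the HBase class):

    import java.util.ArrayDeque;
    import java.util.Deque;

    /** Toy adaptive-LIFO call queue with a CoDel-style drop rule; for illustration only. */
    public class ToyCoDelQueue {
      static final class Call {
        final long enqueueTimeMs = System.currentTimeMillis();
        final Runnable work;
        Call(Runnable work) { this.work = work; }
      }

      private final Deque<Call> queue = new ArrayDeque<>();
      private final int maxCapacity;
      private final double lifoThreshold;     // e.g. 0.8: serve from the tail once 80% full
      private final long codelTargetDelayMs;  // acceptable time a call may sit in the queue
      private final long codelIntervalMs;     // window over which the minimum delay is observed
      private long minDelaySeenMs = Long.MAX_VALUE;
      private long intervalStartMs = System.currentTimeMillis();
      private boolean isOverloaded;

      ToyCoDelQueue(int maxCapacity, double lifoThreshold, long targetDelayMs, long intervalMs) {
        this.maxCapacity = maxCapacity;
        this.lifoThreshold = lifoThreshold;
        this.codelTargetDelayMs = targetDelayMs;
        this.codelIntervalMs = intervalMs;
      }

      synchronized boolean offer(Runnable work) {
        return queue.size() < maxCapacity && queue.offerLast(new Call(work));
      }

      /** Next call to run, or null when the queue is empty or the call was shed. */
      synchronized Runnable take() {
        boolean lifo = queue.size() > lifoThreshold * maxCapacity;   // adaptive LIFO switch
        Call call = lifo ? queue.pollLast() : queue.pollFirst();
        if (call == null) return null;
        long now = System.currentTimeMillis();
        long delay = now - call.enqueueTimeMs;
        minDelaySeenMs = Math.min(minDelaySeenMs, delay);
        if (now > intervalStartMs + codelIntervalMs) {
          // Overloaded if even the best-case call waited longer than the target all interval.
          isOverloaded = minDelaySeenMs > codelTargetDelayMs;
          intervalStartMs = now;
          minDelaySeenMs = Long.MAX_VALUE;
        }
        if (isOverloaded && delay > codelTargetDelayMs) {
          return null;  // CoDel drop: shed the stale call rather than execute it
        }
        return call.work;
      }

      public static void main(String[] args) {
        ToyCoDelQueue q = new ToyCoDelQueue(100, 0.8, 5, 100);
        q.offer(() -> System.out.println("hello"));
        Runnable r = q.take();
        if (r != null) r.run();
      }
    }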

http://git-wip-us.apache.org/repos/asf/hbase/blob/43f99def/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
index 1885264..8f30205 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
@@ -26,4 +26,6 @@ public interface MetricsHBaseServerWrapper {
   int 

[07/37] hbase git commit: HBASE-15184 SparkSQL Scan operation doesn't work on kerberos cluster (Ted Malaska)

2016-02-26 Thread syuanjiang
HBASE-15184 SparkSQL Scan operation doesn't work on kerberos cluster (Ted 
Malaska)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/00248656
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/00248656
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/00248656

Branch: refs/heads/hbase-12439
Commit: 00248656ee9c60009ff1697e90ba9d0f86264103
Parents: f47dba7
Author: tedyu 
Authored: Tue Feb 23 16:52:13 2016 -0800
Committer: tedyu 
Committed: Tue Feb 23 16:52:13 2016 -0800

--
 .../hadoop/hbase/spark/DefaultSource.scala  |  4 +--
 .../hadoop/hbase/spark/HBaseContext.scala   | 15 +---
 .../apache/hadoop/hbase/spark/NewHBaseRDD.scala | 36 
 .../spark/datasources/HBaseTableScanRDD.scala   | 15 
 4 files changed, 57 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/00248656/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala
--
diff --git 
a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala 
b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala
index b6d7982..844b5b5 100644
--- 
a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala
+++ 
b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala
@@ -164,7 +164,7 @@ case class HBaseRelation (val tableName:String,
 HBaseSparkConf.BULKGET_SIZE,  HBaseSparkConf.defaultBulkGetSize))
 
   //create or get latest HBaseContext
-  @transient val hbaseContext:HBaseContext = if (useHBaseContext) {
+  val hbaseContext:HBaseContext = if (useHBaseContext) {
 LatestHBaseContextCache.latest
   } else {
 val config = HBaseConfiguration.create()
@@ -270,7 +270,7 @@ case class HBaseRelation (val tableName:String,
 } else {
   None
 }
-val hRdd = new HBaseTableScanRDD(this, pushDownFilterJava, 
requiredQualifierDefinitionList.seq)
+val hRdd = new HBaseTableScanRDD(this, hbaseContext, pushDownFilterJava, 
requiredQualifierDefinitionList.seq)
 pushDownRowKeyFilter.points.foreach(hRdd.addPoint(_))
 pushDownRowKeyFilter.ranges.foreach(hRdd.addRange(_))
 var resultRDD: RDD[Row] = {

http://git-wip-us.apache.org/repos/asf/hbase/blob/00248656/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala
--
diff --git 
a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala 
b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala
index 2d21e69..61ed3cf 100644
--- 
a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala
+++ 
b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
 import org.apache.hadoop.hbase.io.hfile.{CacheConfig, HFileContextBuilder, 
HFileWriterImpl}
 import org.apache.hadoop.hbase.regionserver.{HStore, StoreFile, BloomType}
 import org.apache.hadoop.hbase.util.Bytes
+import org.apache.hadoop.mapred.JobConf
 import org.apache.spark.broadcast.Broadcast
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.rdd.RDD
@@ -228,7 +229,7 @@ class HBaseContext(@transient sc: SparkContext,
 }))
   }
 
-  def applyCreds[T] (configBroadcast: 
Broadcast[SerializableWritable[Configuration]]){
+  def applyCreds[T] (){
 credentials = SparkHadoopUtil.get.getCurrentUserCredentials()
 
 logDebug("appliedCredentials:" + appliedCredentials + ",credentials:" + 
credentials)
@@ -440,10 +441,14 @@ class HBaseContext(@transient sc: SparkContext,
 TableMapReduceUtil.initTableMapperJob(tableName, scan,
   classOf[IdentityTableMapper], null, null, job)
 
-sc.newAPIHadoopRDD(job.getConfiguration,
+val jconf = new JobConf(job.getConfiguration)
+SparkHadoopUtil.get.addCredentials(jconf)
+new NewHBaseRDD(sc,
   classOf[TableInputFormat],
   classOf[ImmutableBytesWritable],
-  classOf[Result]).map(f)
+  classOf[Result],
+  job.getConfiguration,
+  this).map(f)
   }
 
   /**
@@ -474,7 +479,7 @@ class HBaseContext(@transient sc: SparkContext,
 
 val config = getConf(configBroadcast)
 
-applyCreds(configBroadcast)
+applyCreds
 // specify that this is a proxy user
 val connection = ConnectionFactory.createConnection(config)
 f(it, connection)
@@ -514,7 +519,7 @@ class HBaseContext(@transient sc: SparkContext,
  Iterator[U]): Iterator[U] = {
 
 val config = getConf(configBroadcast)
-applyCreds(configBroadcast)
+applyCreds
 
 val connection = 

hbase git commit: HBASE-15351 Fix description of hbase.bucketcache.size in hbase-default.xml

2016-02-26 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 6cb16e93d -> 41efb9233


HBASE-15351 Fix description of hbase.bucketcache.size in hbase-default.xml


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/41efb923
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/41efb923
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/41efb923

Branch: refs/heads/branch-1.1
Commit: 41efb9233729dd755b712cf6b496afeffc51b957
Parents: 6cb16e9
Author: stack 
Authored: Fri Feb 26 13:49:26 2016 -0800
Committer: stack 
Committed: Fri Feb 26 13:54:46 2016 -0800

--
 .../src/main/resources/hbase-default.xml| 36 +++-
 1 file changed, 35 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/41efb923/hbase-common/src/main/resources/hbase-default.xml
--
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index 2b3c48b..80fe1a8 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -783,6 +783,40 @@ possible configurations would overwhelm and obscure the 
important.
   block is written out and a new block is started.
   
   
+hbase.bucketcache.ioengine
+
+Where to store the contents of the bucketcache. One of: heap,
+offheap, or file. If a file, set it to file:PATH_TO_FILE. See
+http://hbase.apache.org/book.html#offheap.blockcache for more information.
+
+  
+  
+hbase.bucketcache.combinedcache.enabled
+true
+Whether or not the bucketcache is used in league with the LRU
+on-heap block cache. In this mode, indices and blooms are kept in the LRU
+blockcache and the data blocks are kept in the bucketcache.
+  
+  
+hbase.bucketcache.size
+
+A float that EITHER represents a percentage of total heap 
memory
+size to give to the cache (if < 1.0) OR, it is the total capacity in
+megabytes of BucketCache. Default: 0.0
+  
+  
+hbase.bucketcache.sizes
+
+A comma-separated list of sizes for buckets for the 
bucketcache.
+Can be multiple sizes. List block sizes in order from smallest to largest.
+The sizes you use will depend on your data access patterns.
+Must be a multiple of 1024 else you will run into
+'java.io.IOException: Invalid HFile block magic' when you go to read from 
cache.
+If you specify no values here, then you pick up the default bucketsizes set
+in code (See BucketAllocator#DEFAULT_BUCKET_SIZES). 
+  
+  
+  
   hfile.format.version
   3
   The HFile format version to use for new files.
@@ -808,7 +842,7 @@ possible configurations would overwhelm and obscure the 
important.
   hbase.rs.cacheblocksonwrite
   false
   Whether an HFile block should be added to the block cache 
when the
-  block is finished.
+block is finished.
   
   
 hbase.rpc.timeout



hbase git commit: HBASE-15351 Fix description of hbase.bucketcache.size in hbase-default.xml

2016-02-26 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 73e4cb43c -> 31b12fda0


HBASE-15351 Fix description of hbase.bucketcache.size in hbase-default.xml


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/31b12fda
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/31b12fda
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/31b12fda

Branch: refs/heads/branch-1.2
Commit: 31b12fda036e5b1f36f8838f9f7a330adbd39b43
Parents: 73e4cb4
Author: stack 
Authored: Fri Feb 26 13:49:26 2016 -0800
Committer: stack 
Committed: Fri Feb 26 13:54:27 2016 -0800

--
 .../src/main/resources/hbase-default.xml| 36 +++-
 1 file changed, 35 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/31b12fda/hbase-common/src/main/resources/hbase-default.xml
--
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index d6e0961..21533c9 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -778,6 +778,40 @@ possible configurations would overwhelm and obscure the 
important.
   block is written out and a new block is started.
   
   
+hbase.bucketcache.ioengine
+
+Where to store the contents of the bucketcache. One of: heap,
+offheap, or file. If a file, set it to file:PATH_TO_FILE. See
+http://hbase.apache.org/book.html#offheap.blockcache for more information.
+
+  
+  
+hbase.bucketcache.combinedcache.enabled
+true
+Whether or not the bucketcache is used in league with the LRU
+on-heap block cache. In this mode, indices and blooms are kept in the LRU
+blockcache and the data blocks are kept in the bucketcache.
+  
+  
+hbase.bucketcache.size
+
+A float that EITHER represents a percentage of total heap 
memory
+size to give to the cache (if < 1.0) OR, it is the total capacity in
+megabytes of BucketCache. Default: 0.0
+  
+  
+hbase.bucketcache.sizes
+
+A comma-separated list of sizes for buckets for the 
bucketcache.
+Can be multiple sizes. List block sizes in order from smallest to largest.
+The sizes you use will depend on your data access patterns.
+Must be a multiple of 1024 else you will run into
+'java.io.IOException: Invalid HFile block magic' when you go to read from 
cache.
+If you specify no values here, then you pick up the default bucketsizes set
+in code (See BucketAllocator#DEFAULT_BUCKET_SIZES). 
+  
+  
+  
   hfile.format.version
   3
   The HFile format version to use for new files.
@@ -803,7 +837,7 @@ possible configurations would overwhelm and obscure the 
important.
   hbase.rs.cacheblocksonwrite
   false
   Whether an HFile block should be added to the block cache 
when the
-  block is finished.
+block is finished.
   
   
 hbase.rpc.timeout



hbase git commit: HBASE-15351 Fix description of hbase.bucketcache.size in hbase-default.xml

2016-02-26 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1 e0d7e0eb3 -> 46ffa8598


HBASE-15351 Fix description of hbase.bucketcache.size in hbase-default.xml


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/46ffa859
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/46ffa859
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/46ffa859

Branch: refs/heads/branch-1
Commit: 46ffa85982c0d56a1d3ea98f322bf4786dd50ef3
Parents: e0d7e0e
Author: stack 
Authored: Fri Feb 26 13:49:26 2016 -0800
Committer: stack 
Committed: Fri Feb 26 13:53:59 2016 -0800

--
 .../src/main/resources/hbase-default.xml| 36 +++-
 1 file changed, 35 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/46ffa859/hbase-common/src/main/resources/hbase-default.xml
--
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index ca104cd..94f1e6d 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -780,6 +780,40 @@ possible configurations would overwhelm and obscure the 
important.
   block is written out and a new block is started.
   
   
+hbase.bucketcache.ioengine
+
+Where to store the contents of the bucketcache. One of: heap,
+offheap, or file. If a file, set it to file:PATH_TO_FILE. See
+http://hbase.apache.org/book.html#offheap.blockcache for more information.
+
+  
+  
+hbase.bucketcache.combinedcache.enabled
+true
+Whether or not the bucketcache is used in league with the LRU
+on-heap block cache. In this mode, indices and blooms are kept in the LRU
+blockcache and the data blocks are kept in the bucketcache.
+  
+  
+hbase.bucketcache.size
+
+A float that EITHER represents a percentage of total heap 
memory
+size to give to the cache (if < 1.0) OR, it is the total capacity in
+megabytes of BucketCache. Default: 0.0
+  
+  
+hbase.bucketcache.sizes
+
+A comma-separated list of sizes for buckets for the 
bucketcache.
+Can be multiple sizes. List block sizes in order from smallest to largest.
+The sizes you use will depend on your data access patterns.
+Must be a multiple of 1024 else you will run into
+'java.io.IOException: Invalid HFile block magic' when you go to read from 
cache.
+If you specify no values here, then you pick up the default bucketsizes set
+in code (See BucketAllocator#DEFAULT_BUCKET_SIZES). 
+  
+  
+  
   hfile.format.version
   3
   The HFile format version to use for new files.
@@ -805,7 +839,7 @@ possible configurations would overwhelm and obscure the 
important.
   hbase.rs.cacheblocksonwrite
   false
   Whether an HFile block should be added to the block cache 
when the
-  block is finished.
+block is finished.
   
   
 hbase.rpc.timeout



hbase git commit: HBASE-15351 Fix description of hbase.bucketcache.size in hbase-default.xml

2016-02-26 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 3c660e2a0 -> 8f6e29785


HBASE-15351 Fix description of hbase.bucketcache.size in hbase-default.xml


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8f6e2978
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8f6e2978
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8f6e2978

Branch: refs/heads/master
Commit: 8f6e29785abff6b3205d7058d9b88c1cf27faa29
Parents: 3c660e2
Author: stack 
Authored: Fri Feb 26 13:49:26 2016 -0800
Committer: stack 
Committed: Fri Feb 26 13:49:26 2016 -0800

--
 .../src/main/resources/hbase-default.xml| 28 
 1 file changed, 17 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8f6e2978/hbase-common/src/main/resources/hbase-default.xml
--
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index e446a24..e50e89e 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -857,29 +857,35 @@ possible configurations would overwhelm and obscure the 
important.
 hbase.bucketcache.ioengine
 
 Where to store the contents of the bucketcache. One of: heap,
-  offheap, or file. If a file, set it to file:PATH_TO_FILE. See
-  http://hbase.apache.org/book.html#offheap.blockcache for more 
information.
+offheap, or file. If a file, set it to file:PATH_TO_FILE. See
+http://hbase.apache.org/book.html#offheap.blockcache for more information.
 
   
   
 hbase.bucketcache.combinedcache.enabled
 true
 Whether or not the bucketcache is used in league with the LRU
-  on-heap block cache. In this mode, indices and blooms are kept in the LRU
-  blockcache and the data blocks are kept in the bucketcache.
+on-heap block cache. In this mode, indices and blooms are kept in the LRU
+blockcache and the data blocks are kept in the bucketcache.
   
   
 hbase.bucketcache.size
-65536
-The size of the buckets for the bucketcache if you only use a 
single size.
-  Defaults to the default blocksize, which is 64 * 1024.
+
+A float that EITHER represents a percentage of total heap 
memory
+size to give to the cache (if < 1.0) OR, it is the total capacity in
+megabytes of BucketCache. Default: 0.0
   
   
 hbase.bucketcache.sizes
 
-A comma-separated list of sizes for buckets for the 
bucketcache
-  if you use multiple sizes. Should be a list of block sizes in order from 
smallest
-  to largest. The sizes you use will depend on your data access 
patterns.
+A comma-separated list of sizes for buckets for the 
bucketcache.
+Can be multiple sizes. List block sizes in order from smallest to largest.
+The sizes you use will depend on your data access patterns.
+Must be a multiple of 1024 else you will run into
+'java.io.IOException: Invalid HFile block magic' when you go to read from 
cache.
+If you specify no values here, then you pick up the default bucketsizes set
+in code (See BucketAllocator#DEFAULT_BUCKET_SIZES). 
+  
   
   
   hfile.format.version
@@ -907,7 +913,7 @@ possible configurations would overwhelm and obscure the 
important.
   hbase.rs.cacheblocksonwrite
   false
   Whether an HFile block should be added to the block cache 
when the
-  block is finished.
+block is finished.
   
   
 hbase.rpc.timeout
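
The new hbase.bucketcache.* descriptions above read more easily next to a
concrete configuration. A hedged example of setting them programmatically (the
property names come from the diff; the values are arbitrary and assume
hbase-common is on the classpath):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class BucketCacheConfigExample {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // One of: heap, offheap, or file:PATH_TO_FILE.
        conf.set("hbase.bucketcache.ioengine", "offheap");
        // Per the description above: a value below 1.0 is a share of heap,
        // anything larger is an absolute capacity in megabytes.
        conf.setFloat("hbase.bucketcache.size", 8192f);          // 8 GB, expressed in MB
        // Bucket sizes must each be a multiple of 1024, listed smallest to largest.
        conf.set("hbase.bucketcache.sizes", "5120,9216,17408,33792");
        conf.setBoolean("hbase.bucketcache.combinedcache.enabled", true);
        System.out.println("bucketcache MB = " + conf.getFloat("hbase.bucketcache.size", 0f));
      }
    }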



[40/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/ipc/AdaptiveLifoCoDelCallQueue.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/AdaptiveLifoCoDelCallQueue.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/AdaptiveLifoCoDelCallQueue.html
new file mode 100644
index 000..5163a53
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/AdaptiveLifoCoDelCallQueue.html
@@ -0,0 +1,977 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+AdaptiveLifoCoDelCallQueue (Apache HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev 
Class
+Next 
Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.ipc
+Class 
AdaptiveLifoCoDelCallQueue
+
+
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.ipc.AdaptiveLifoCoDelCallQueue
+
+
+
+
+
+
+
+All Implemented Interfaces:
+http://docs.oracle.com/javase/7/docs/api/java/lang/Iterable.html?is-external=true;
 title="class or interface in java.lang">IterableCallRunner, http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionCallRunner, http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in java.util.concurrent">BlockingQueueCallRunner, http://docs.oracle.com/javase/7/docs/api/java/util/Queue.html?is-external=true;
 title="class or interf
 ace in java.util">QueueCallRunner
+
+
+
+@InterfaceAudience.Private
+public class AdaptiveLifoCoDelCallQueue
+extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+implements http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in java.util.concurrent">BlockingQueueCallRunner
+Adaptive LIFO blocking queue utilizing CoDel algorithm to 
prevent queue overloading.
+
+ Implementing http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in 
java.util.concurrent">BlockingQueue interface to be compatible 
with RpcExecutor.
+
+ Currently uses milliseconds internally, need to look into whether we should 
use
+ nanoseconds for timeInterval and minDelay.
+See Also:http://queue.acm.org/detail.cfm?id=2839461;>Fail at Scale paper, 
+https://github.com/facebook/wangle/blob/master/wangle/concurrent/Codel.cpp;>
+   CoDel version for generic job queues in Wangle library
+
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private int
+codelInterval
+
+
+private int
+codelTargetDelay
+
+
+private long
+intervalTime
+
+
+private boolean
+isOverloaded
+
+
+private double
+lifoThreshold
+
+
+private http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/locks/ReentrantLock.html?is-external=true;
 title="class or interface in 
java.util.concurrent.locks">ReentrantLock
+lock
+Lock held by take ops, all other locks are inside queue 
impl.
+
+
+
+private int
+maxCapacity
+
+
+private long
+minDelay
+
+
+private http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicLong
+numGeneralCallsDropped
+
+
+private http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicLong
+numLifoModeSwitches
+
+
+private http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/LinkedBlockingDeque.html?is-external=true;
 title="class or interface in 
java.util.concurrent">LinkedBlockingDequeCallRunner
+queue
+
+
+private http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicBoolean
+resetDelay
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+AdaptiveLifoCoDelCallQueue(intcapacity,
+inttargetDelay,
+intinterval,
+   

[23/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/Region.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/Region.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/Region.html
index 7846373..345dc35 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/Region.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/Region.html
@@ -97,7 +97,7 @@
 
 @InterfaceAudience.LimitedPrivate(value="Coprocesssor")
 @InterfaceStability.Evolving
-public interface Region
+public interface Region
 extends ConfigurationObserver
 Regions store data for a certain region of a table.  It 
stores all columns
  for each row. A given table consists of one or more Regions.
@@ -387,20 +387,24 @@ extends getRegionInfo()
 
 
+RegionServicesForStores
+getRegionServicesForStores()
+
+
 Region.RowLock
 getRowLock(byte[]row,
 booleanwaitForLock)
 Tries to acquire a lock on the given row.
 
 
-
+
 RegionScanner
 getScanner(Scanscan)
 Return an iterator that scans over the HRegion, returning 
the indicated
  columns and rows specified by the Scan.
 
 
-
+
 RegionScanner
 getScanner(Scanscan,
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListKeyValueScanneradditionalScanners)
@@ -408,31 +412,31 @@ extends Scan.
 
 
-
+
 Store
 getStore(byte[]family)
 Return the Store for the given family
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 getStoreFileList(byte[][]columns)
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListStore
 getStores()
 Return the list of Stores managed by this region
 
 
-
+
 HTableDescriptor
 getTableDesc()
 
-
+
 long
 getWriteRequestsCount()
 
-
+
 Result
 increment(Incrementincrement,
   longnonceGroup,
@@ -440,37 +444,37 @@ extends Perform one or more increment operations on a row.
 
 
-
+
 boolean
 isAvailable()
 
-
+
 boolean
 isClosed()
 
-
+
 boolean
 isClosing()
 
-
+
 boolean
 isLoadingCfsOnDemandDefault()
 
-
+
 boolean
 isReadOnly()
 
-
+
 boolean
 isRecovering()
 
-
+
 void
 mutateRow(RowMutationsmutations)
 Performs multiple mutations atomically on a single 
row.
 
 
-
+
 void
 mutateRowsWithLocks(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionMutationmutations,
   http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in 
java.util">Collectionbyte[]rowsToLock,
@@ -479,13 +483,13 @@ extends Perform atomic mutations within the region.
 
 
-
+
 void
 prepareDelete(Deletedelete)
 Prepare a delete for a row mutation processor
 
 
-
+
 void
 prepareDeleteTimestamps(Mutationmutation,
   http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellfamilyCellMap,
@@ -493,13 +497,13 @@ extends Set up correct timestamps in the KVs in Delete object.
 
 
-
+
 void
 processRowsWithLocks(RowProcessor?,?processor)
 Performs atomic multiple reads and writes on a given 
row.
 
 
-
+
 void
 processRowsWithLocks(RowProcessor?,?processor,
 longnonceGroup,
@@ -507,7 +511,7 @@ extends Performs atomic multiple reads and writes on a given 
row.
 
 
-
+
 void
 processRowsWithLocks(RowProcessor?,?processor,
 longtimeout,
@@ -516,13 +520,13 @@ extends Performs atomic multiple reads and writes on a given 
row.
 
 
-
+
 void
 put(Putput)
 Puts some data in the table.
 
 
-
+
 boolean
 refreshStoreFiles()
 Check the region's underlying store files, open the files 
that have not
@@ -530,7 +534,7 @@ extends 
+
 boolean
 registerService(com.google.protobuf.Serviceinstance)
 Registers a new protocol buffer Service 
subclass as a coprocessor endpoint to
@@ -539,33 +543,33 @@ extends 
+
 void
 releaseRowLocks(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegion.RowLockrowLocks)
 If the given list of row locks is not null, releases all 
locks.
 
 
-
+
 void
 startRegionOperation()
 This method needs to be called before any public call that 
reads or
  modifies data.
 
 
-
+
 void
 startRegionOperation(Region.Operationop)
 This method needs to be called before any public call that 
reads or
  

[49/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/apache_hbase_reference_guide.pdfmarks
--
diff --git a/apache_hbase_reference_guide.pdfmarks 
b/apache_hbase_reference_guide.pdfmarks
index 63dd81f..8ccb9e6 100644
--- a/apache_hbase_reference_guide.pdfmarks
+++ b/apache_hbase_reference_guide.pdfmarks
@@ -2,8 +2,8 @@
   /Author (Apache HBase Team)
   /Subject ()
   /Keywords ()
-  /ModDate (D:20160224151429)
-  /CreationDate (D:20160224151429)
+  /ModDate (D:20160226190416)
+  /CreationDate (D:20160226190416)
   /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
   /Producer ()
   /DOCINFO pdfmark

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/apidocs/constant-values.html
--
diff --git a/apidocs/constant-values.html b/apidocs/constant-values.html
index d16cfe9..e2e5f1f 100644
--- a/apidocs/constant-values.html
+++ b/apidocs/constant-values.html
@@ -4577,6 +4577,25 @@
 
 
 
+org.apache.hadoop.hbase.util.FastLongHistogram
+
+Modifier and Type
+Constant Field
+Value
+
+
+
+
+
+publicstaticfinalint
+DEFAULT_NBINS
+255
+
+
+
+
+
+
 org.apache.hadoop.hbase.util.OrderedBytes
 
 Modifier and Type

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index f8f5425..820d5c3 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -2730,12 +2730,18 @@
 
 DEFAULT_NAMESPACE_NAME_STR
 - Static variable in class org.apache.hadoop.hbase.NamespaceDescriptor
 
+DEFAULT_NBINS
 - Static variable in class org.apache.hadoop.hbase.util.FastLongHistogram
+
+Default number of bins.
+
 DEFAULT_NORMALIZATION_ENABLED
 - Static variable in class org.apache.hadoop.hbase.HTableDescriptor
 
 Constant that denotes whether the table is normalized by 
default.
 
 DEFAULT_PREFETCH_BLOCKS_ON_OPEN
 - Static variable in class org.apache.hadoop.hbase.HColumnDescriptor
 
+DEFAULT_QUANTILES
 - Static variable in class org.apache.hadoop.hbase.util.FastLongHistogram
+
 DEFAULT_READONLY
 - Static variable in class org.apache.hadoop.hbase.HTableDescriptor
 
 Constant that denotes whether the table is READONLY by 
default and is false
@@ -3840,6 +3846,10 @@
 FastLongHistogram is a thread-safe class that estimates the distribution of data
 and computes the quantiles.
 
+FastLongHistogram()
 - Constructor for class org.apache.hadoop.hbase.util.FastLongHistogram
+
+Constructor.
+
 FastLongHistogram(int)
 - Constructor for class org.apache.hadoop.hbase.util.FastLongHistogram
 
 Constructor.
@@ -4700,6 +4710,8 @@
 
 Return the list of attached co-processor represented by 
their name className
 
+getCount()
 - Method in class org.apache.hadoop.hbase.util.FastLongHistogram
+
 getCurrent()
 - Static method in class org.apache.hadoop.hbase.security.User
 
 Returns the User instance within current 
execution context.
@@ -5210,6 +5222,8 @@
 
 getMax() 
- Method in class org.apache.hadoop.hbase.io.TimeRange
 
+getMax()
 - Method in class org.apache.hadoop.hbase.util.FastLongHistogram
+
 getMaxAllowedOperationTime()
 - Method in exception org.apache.hadoop.hbase.errorhandling.TimeoutException
 
 getMaxColumn()
 - Method in class org.apache.hadoop.hbase.filter.ColumnRangeFilter
@@ -5254,6 +5268,8 @@
 
 Given a byte array, returns its MD5 hash as a hex 
string.
 
+getMean()
 - Method in class org.apache.hadoop.hbase.util.FastLongHistogram
+
 getMemStoreFlushSize()
 - Method in class org.apache.hadoop.hbase.HTableDescriptor
 
 Returns the size of the memstore after which a flush to 
filesystem is triggered.
@@ -5268,6 +5284,8 @@
 
 getMin() 
- Method in class org.apache.hadoop.hbase.io.TimeRange
 
+getMin()
 - Method in class org.apache.hadoop.hbase.util.FastLongHistogram
+
 getMinColumn()
 - Method in class org.apache.hadoop.hbase.filter.ColumnRangeFilter
 
 getMinColumnInclusive()
 - Method in class org.apache.hadoop.hbase.filter.ColumnRangeFilter
@@ -5388,6 +5406,8 @@
 
 Map of families to their most recent qualifiers and 
values.
 
+getNumAtOrBelow(long)
 - Method in class org.apache.hadoop.hbase.util.FastLongHistogram
+
 getNumberOfRegions()
 - Method in class org.apache.hadoop.hbase.ServerLoad
 
 getNumberOfRequests()
 - Method in class org.apache.hadoop.hbase.ServerLoad
@@ -5578,6 +5598,8 @@
 
 Computes the quantiles give the ratios.
 
+getQuantiles()
 - Method in class org.apache.hadoop.hbase.util.FastLongHistogram
+
 getQuotaRetriever(QuotaFilter)
 - Method in interface org.apache.hadoop.hbase.client.Admin
 
 Return a QuotaRetriever to list the quotas based on the 
filter.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/apidocs/org/apache/hadoop/hbase/util/FastLongHistogram.html
--
diff --git 
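
The index entries above describe FastLongHistogram as a thread-safe class that
estimates a distribution and computes quantiles, with DEFAULT_NBINS = 255. A
from-scratch toy of the fixed-bin idea behind such a histogram (bin layout, API,
and the uniform-width assumption are all mine, not the HBase implementation):

    import java.util.concurrent.atomic.AtomicLongArray;

    /** Toy fixed-bin histogram: O(1) thread-safe inserts, approximate quantiles on demand. */
    public class ToyLongHistogram {
      private final long min, max;
      private final AtomicLongArray bins;

      ToyLongHistogram(long min, long max, int nbins) {
        this.min = min;
        this.max = max;
        this.bins = new AtomicLongArray(nbins);
      }

      void add(long value) {
        long clamped = Math.max(min, Math.min(max, value));
        int bin = (int) ((clamped - min) * (bins.length() - 1) / Math.max(1, max - min));
        bins.incrementAndGet(bin);
      }

      /** Approximate q-quantile (0 < q <= 1): smallest bin upper bound covering q of the samples. */
      long quantile(double q) {
        long total = 0;
        for (int i = 0; i < bins.length(); i++) total += bins.get(i);
        long target = (long) Math.ceil(q * total), seen = 0;
        for (int i = 0; i < bins.length(); i++) {
          seen += bins.get(i);
          if (seen >= target) return min + (max - min) * (i + 1) / bins.length();
        }
        return max;
      }

      public static void main(String[] args) {
        ToyLongHistogram h = new ToyLongHistogram(0, 1000, 255);
        for (long v = 0; v <= 1000; v++) h.add(v);
        System.out.println("p50 ~ " + h.quantile(0.5) + ", p99 ~ " + h.quantile(0.99));
      }
    }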

[43/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.CachedBlocksByFile.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.CachedBlocksByFile.html
 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.CachedBlocksByFile.html
index 8956cc7..dfe0b29 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.CachedBlocksByFile.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.CachedBlocksByFile.html
@@ -99,7 +99,7 @@
 
 
 
-public static class BlockCacheUtil.CachedBlocksByFile
+public static class BlockCacheUtil.CachedBlocksByFile
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Use one of these to keep a running account of cached blocks 
by file.  Throw it away when done.
  This is different than metrics in that it is stats on current state of a 
cache.
@@ -123,31 +123,31 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 Field and Description
 
 
-(package private) 
com.codahale.metrics.Histogram
-age
-
-
 private http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMaphttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/7/docs/api/java/util/NavigableSet.html?is-external=true;
 title="class or interface in java.util">NavigableSetCachedBlock
 cachedBlockByFile
 Map by filename.
 
 
-
+
 private int
 count
 
-
+
 private int
 dataBlockCount
 
-
+
 private long
 dataSize
 
-
+
 static int
 DEFAULT_MAX
 
+
+(package private) FastLongHistogram
+hist
+
 
 private int
 max
@@ -259,7 +259,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 count
-privateint count
+privateint count
 
 
 
@@ -268,7 +268,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 dataBlockCount
-privateint dataBlockCount
+privateint dataBlockCount
 
 
 
@@ -277,7 +277,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 size
-privatelong size
+privatelong size
 
 
 
@@ -286,7 +286,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 dataSize
-privatelong dataSize
+privatelong dataSize
 
 
 
@@ -295,7 +295,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 now
-private finallong now
+private finallong now
 
 
 
@@ -304,7 +304,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 max
-private finalint max
+private finalint max
 
 
 
@@ -313,7 +313,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 DEFAULT_MAX
-public static finalint DEFAULT_MAX
+public static finalint DEFAULT_MAX
 See Also:Constant
 Field Values
 
 
@@ -323,17 +323,17 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 cachedBlockByFile
-privatehttp://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMaphttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/7/docs/api/java/util/NavigableSet.html?is-external=true;
 title="class or interface in java.util">NavigableSetCachedBlock 
cachedBlockByFile
+privatehttp://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMaphttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/7/docs/api/java/util/NavigableSet.html?is-external=true;
 title="class or interface in java.util">NavigableSetCachedBlock 
cachedBlockByFile
 Map by filename. Use concurrent utils because we want our Map and contained blocks sorted.
 
 
-
+
 
 
 
 
-age
-com.codahale.metrics.Histogram age
+hist
+FastLongHistogram hist
 
 
 
@@ -350,7 +350,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 BlockCacheUtil.CachedBlocksByFile
-BlockCacheUtil.CachedBlocksByFile()
+BlockCacheUtil.CachedBlocksByFile()
 
 
 
@@ -359,7 +359,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 BlockCacheUtil.CachedBlocksByFile
-BlockCacheUtil.CachedBlocksByFile(org.apache.hadoop.conf.Configurationc)
+BlockCacheUtil.CachedBlocksByFile(org.apache.hadoop.conf.Configurationc)
 
 
 
@@ -376,7 +376,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 update
-publicbooleanupdate(CachedBlockcb)
+publicbooleanupdate(CachedBlockcb)
 Parameters:cb - 
 Returns: True if full, i.e. we won't be adding any more.
 
@@ 
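The hunks above swap the codahale Histogram field "age" for a FastLongHistogram "hist" inside BlockCacheUtil.CachedBlocksByFile, the per-file running account of cached blocks whose update() returns true once it is full. A stripped-down sketch of that accounting pattern, with a stand-in BlockInfo type in place of CachedBlock, is:

import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ConcurrentSkipListSet;

// A stripped-down sketch of the "running account of cached blocks by file" pattern
// documented above. BlockInfo is a stand-in for CachedBlock; the real class also
// feeds block ages into a FastLongHistogram (the "hist" field in the diff above).
public class CachedBlocksByFileSketch {
  static class BlockInfo implements Comparable<BlockInfo> {
    final String filename;
    final long offset;
    final long size;
    BlockInfo(String filename, long offset, long size) {
      this.filename = filename;
      this.offset = offset;
      this.size = size;
    }
    @Override
    public int compareTo(BlockInfo o) {
      int c = filename.compareTo(o.filename);
      return c != 0 ? c : Long.compare(offset, o.offset);
    }
  }

  // Concurrent, sorted map and sets: we want the map and the contained blocks sorted.
  private final NavigableMap<String, NavigableSet<BlockInfo>> cachedBlockByFile =
      new ConcurrentSkipListMap<String, NavigableSet<BlockInfo>>();
  private final int max;
  private int count;
  private long size;

  public CachedBlocksByFileSketch(int max) {
    this.max = max;
  }

  /** Record one cached block; returns true once we are "full" and won't add more. */
  public boolean update(BlockInfo cb) {
    NavigableSet<BlockInfo> blocks = cachedBlockByFile.get(cb.filename);
    if (blocks == null) {
      blocks = new ConcurrentSkipListSet<BlockInfo>();
      cachedBlockByFile.put(cb.filename, blocks);
    }
    if (blocks.add(cb)) {
      count++;
      size += cb.size;
    }
    return count >= max;
  }
}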

[48/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 7fcf382..39b1115 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -280,10 +280,10 @@
 Warnings
 Errors
 
-1688
+1692
 0
 0
-12743
+12712
 
 Files
 
@@ -1843,860 +1843,850 @@
 0
 4
 
-org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
-0
-0
-1
-
 org/apache/hadoop/hbase/io/hfile/BlockCache.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java
 0
 0
-23
-
+22
+
 org/apache/hadoop/hbase/io/hfile/BlockCachesIterator.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/BlockType.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/CacheConfig.java
 0
 0
 14
-
+
 org/apache/hadoop/hbase/io/hfile/CacheStats.java
 0
 0
-7
-
+6
+
 org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/CompoundBloomFilter.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterBase.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/CorruptHFileException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
 0
 0
 16
-
+
 org/apache/hadoop/hbase/io/hfile/HFile.java
 0
 0
 48
-
+
 org/apache/hadoop/hbase/io/hfile/HFileBlock.java
 0
 0
 52
-
+
 org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
 0
 0
 41
-
+
 org/apache/hadoop/hbase/io/hfile/HFileContext.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
 0
 0
 19
-
+
 org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
 0
 0
 53
-
+
 org/apache/hadoop/hbase/io/hfile/HFileScanner.java
 0
 0
 22
-
+
 org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
 0
 0
 22
-
+
 org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
 0
 0
 18
-
+
 org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
 0
 0
 34
-
+
 org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
 0
 0
 28
-
+
 org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/io/util/HeapMemorySizeUtil.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/io/util/LRUDictionary.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/io/util/StreamUtils.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/ipc/AsyncCall.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ipc/AsyncRpcClient.java
 0
 0
 34
-
+
 org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ipc/BufferChain.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/ipc/Call.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ipc/CallRunner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ipc/CallTimeoutException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ipc/ConnectionId.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ipc/FailedServers.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ipc/IPCUtil.java
 0
 0
 48
-
+
 org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/ipc/PriorityFunction.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
 0
 0
 3
-
+
 

[17/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.Base.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.Base.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.Base.html
index 96b0bd0..45249fd 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.Base.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.Base.html
@@ -107,7 +107,7 @@
 
 
 
-public static class WALActionsListener.Base
+public static class WALActionsListener.Base
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements WALActionsListener
 
@@ -224,8 +224,7 @@ implements 
 void
-visitLogEntryBeforeWrite(HTableDescriptorhtd,
-WALKeylogKey,
+visitLogEntryBeforeWrite(WALKeylogKey,
 WALEditlogEdit)
 
 
@@ -256,7 +255,7 @@ implements 
 
 WALActionsListener.Base
-publicWALActionsListener.Base()
+publicWALActionsListener.Base()
 
 
 
@@ -273,7 +272,7 @@ implements 
 
 preLogRoll
-publicvoidpreLogRoll(org.apache.hadoop.fs.PatholdPath,
+publicvoidpreLogRoll(org.apache.hadoop.fs.PatholdPath,
   org.apache.hadoop.fs.PathnewPath)
 throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from interface:WALActionsListener
@@ -293,7 +292,7 @@ implements 
 
 postLogRoll
-publicvoidpostLogRoll(org.apache.hadoop.fs.PatholdPath,
+publicvoidpostLogRoll(org.apache.hadoop.fs.PatholdPath,
org.apache.hadoop.fs.PathnewPath)
  throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from interface:WALActionsListener
@@ -313,7 +312,7 @@ implements 
 
 preLogArchive
-publicvoidpreLogArchive(org.apache.hadoop.fs.PatholdPath,
+publicvoidpreLogArchive(org.apache.hadoop.fs.PatholdPath,
  org.apache.hadoop.fs.PathnewPath)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from interface:WALActionsListener
@@ -332,7 +331,7 @@ implements 
 
 postLogArchive
-publicvoidpostLogArchive(org.apache.hadoop.fs.PatholdPath,
+publicvoidpostLogArchive(org.apache.hadoop.fs.PatholdPath,
   org.apache.hadoop.fs.PathnewPath)
 throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from interface:WALActionsListener
@@ -351,7 +350,7 @@ implements 
 
 logRollRequested
-publicvoidlogRollRequested(booleantooFewReplicas)
+publicvoidlogRollRequested(booleantooFewReplicas)
 Description copied from interface:WALActionsListener
 A request was made that the WAL be rolled.
 
@@ -366,7 +365,7 @@ implements 
 
 logCloseRequested
-publicvoidlogCloseRequested()
+publicvoidlogCloseRequested()
 Description copied from interface:WALActionsListener
 The WAL is about to close.
 
@@ -381,7 +380,7 @@ implements 
 
 visitLogEntryBeforeWrite
-publicvoidvisitLogEntryBeforeWrite(HRegionInfoinfo,
+publicvoidvisitLogEntryBeforeWrite(HRegionInfoinfo,
 WALKeylogKey,
 WALEditlogEdit)
 Description copied from interface:WALActionsListener
@@ -392,19 +391,18 @@ implements 
+
 
 
 
 
 visitLogEntryBeforeWrite
-publicvoidvisitLogEntryBeforeWrite(HTableDescriptorhtd,
-WALKeylogKey,
+publicvoidvisitLogEntryBeforeWrite(WALKeylogKey,
 WALEditlogEdit)
   throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Specified by:
-visitLogEntryBeforeWritein
 interfaceWALActionsListener
+visitLogEntryBeforeWritein
 interfaceWALActionsListener
 logEdit - TODO: Retire this in favor of
   WALActionsListener.visitLogEntryBeforeWrite(HRegionInfo,
 WALKey, WALEdit) It only exists to get
   scope when replicating. Scope should be in the WALKey and not need 
us passing in a
@@ -419,7 +417,7 @@ implements 
 
 postAppend
-publicvoidpostAppend(longentryLen,
+publicvoidpostAppend(longentryLen,
   longelapsedTimeMillis)
 Description copied from interface:WALActionsListener
 For notification post append to the writer.  Used by 
metrics system at least.
@@ -436,7 +434,7 @@ implements 
 
 postSync
-publicvoidpostSync(longtimeInNanos,
+publicvoidpostSync(longtimeInNanos,
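The page above documents the no-op base listener: WALActionsListener.Base implements every WAL hook so subclasses override only what they need (note the visitLogEntryBeforeWrite overload losing its HTableDescriptor parameter). A simplified sketch of that pattern, using stand-in types rather than the real WALActionsListener interface, is:

// A minimal sketch of the no-op base-listener pattern this page documents: Base
// implements every WAL hook as a no-op so subclasses override only what they need.
// The interface below is a simplified stand-in, not the real WALActionsListener.
public class WalListenerSketch {
  interface WalListener {
    void preLogRoll(String oldPath, String newPath);
    void postLogRoll(String oldPath, String newPath);
    void logRollRequested(boolean tooFewReplicas);
    void postAppend(long entryLen, long elapsedTimeMillis);
    void postSync(long timeInNanos, int handlerSyncs);
  }

  /** No-op base: subclass and override only the hooks you care about. */
  static class Base implements WalListener {
    public void preLogRoll(String oldPath, String newPath) {}
    public void postLogRoll(String oldPath, String newPath) {}
    public void logRollRequested(boolean tooFewReplicas) {}
    public void postAppend(long entryLen, long elapsedTimeMillis) {}
    public void postSync(long timeInNanos, int handlerSyncs) {}
  }

  /** Example subscriber: only tracks time spent in WAL syncs. */
  static class SyncTimingListener extends Base {
    private long totalSyncNanos;
    @Override
    public void postSync(long timeInNanos, int handlerSyncs) {
      totalSyncNanos += timeInNanos;
    }
    long totalSyncNanos() {
      return totalSyncNanos;
    }
  }
}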
 

[01/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 89b638a43 -> 55dfd6fed


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/metrics2/lib/MetricMutableQuantiles.RolloverSample.html
--
diff --git 
a/devapidocs/org/apache/hadoop/metrics2/lib/MetricMutableQuantiles.RolloverSample.html
 
b/devapidocs/org/apache/hadoop/metrics2/lib/MetricMutableQuantiles.RolloverSample.html
deleted file mode 100644
index 9cf5ee0..000
--- 
a/devapidocs/org/apache/hadoop/metrics2/lib/MetricMutableQuantiles.RolloverSample.html
+++ /dev/null
@@ -1,306 +0,0 @@
-MetricMutableQuantiles.RolloverSample (Apache HBase 2.0.0-SNAPSHOT API)
-org.apache.hadoop.metrics2.lib
-Class 
MetricMutableQuantiles.RolloverSample
-
-
-
-http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
-
-
-org.apache.hadoop.metrics2.lib.MetricMutableQuantiles.RolloverSample
-
-
-
-
-
-
-
-All Implemented Interfaces:
-http://docs.oracle.com/javase/7/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable
-
-
-Enclosing class:
-MetricMutableQuantiles
-
-
-
-private static class MetricMutableQuantiles.RolloverSample
-extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-implements http://docs.oracle.com/javase/7/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable
-Runnable used to periodically roll over the internal MetricSampleQuantiles every 
interval.
-
-
-
-
-
-
-
-
-
-
-
-Field Summary
-
-Fields
-
-Modifier and Type
-Field and Description
-
-
-(package private) MetricMutableQuantiles
-parent
-
-
-
-
-
-
-
-
-
-Constructor Summary
-
-Constructors
-
-Constructor and Description
-
-
-MetricMutableQuantiles.RolloverSample(MetricMutableQuantilesparent)
-
-
-
-
-
-
-
-
-
-Method Summary
-
-Methods
-
-Modifier and Type
-Method and Description
-
-
-void
-run()
-
-
-
-
-
-
-Methods inherited from class java.lang.Object:
-clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait(long), wait(long, int)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-Field Detail
-
-
-
-
-
-parent
-MetricMutableQuantiles parent
-
-
-
-
-
-
-
-
-
-Constructor Detail
-
-
-
-
-
-MetricMutableQuantiles.RolloverSample
-publicMetricMutableQuantiles.RolloverSample(MetricMutableQuantilesparent)
-
-
-
-
-
-
-
-
-
-Method Detail
-
-
-
-
-
-run
-publicvoidrun()
-
-Specified by:

[04/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.RegionEntryBuffer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.RegionEntryBuffer.html 
b/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.RegionEntryBuffer.html
index d60f547..0d7bcf1 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.RegionEntryBuffer.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.RegionEntryBuffer.html
@@ -103,7 +103,7 @@
 
 
 
-public static class WALSplitter.RegionEntryBuffer
+public static class WALSplitter.RegionEntryBuffer
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements HeapSize
 A buffer of some number of edits for a given region.
@@ -229,7 +229,7 @@ implements 
 
 heapInBuffer
-long heapInBuffer
+long heapInBuffer
 
 
 
@@ -238,7 +238,7 @@ implements 
 
 entryBuffer
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entry entryBuffer
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entry entryBuffer
 
 
 
@@ -247,7 +247,7 @@ implements 
 
 tableName
-TableName tableName
+TableName tableName
 
 
 
@@ -256,7 +256,7 @@ implements 
 
 encodedRegionName
-byte[] encodedRegionName
+byte[] encodedRegionName
 
 
 
@@ -273,7 +273,7 @@ implements 
 
 WALSplitter.RegionEntryBuffer
-WALSplitter.RegionEntryBuffer(TableNametableName,
+WALSplitter.RegionEntryBuffer(TableNametableName,
  byte[]region)
 
 
@@ -291,7 +291,7 @@ implements 
 
 appendEntry
-longappendEntry(WAL.Entryentry)
+longappendEntry(WAL.Entryentry)
 
 
 
@@ -300,7 +300,7 @@ implements 
 
 internify
-privatevoidinternify(WAL.Entryentry)
+privatevoidinternify(WAL.Entryentry)
 
 
 
@@ -309,7 +309,7 @@ implements 
 
 heapSize
-publiclongheapSize()
+publiclongheapSize()
 
 Specified by:
 heapSizein
 interfaceHeapSize
@@ -323,7 +323,7 @@ implements 
 
 getEncodedRegionName
-publicbyte[]getEncodedRegionName()
+publicbyte[]getEncodedRegionName()
 
 
 
@@ -332,7 +332,7 @@ implements 
 
 getEntryBuffer
-publichttp://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.EntrygetEntryBuffer()
+publichttp://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.EntrygetEntryBuffer()
 
 
 
@@ -341,7 +341,7 @@ implements 
 
 getTableName
-publicTableNamegetTableName()
+publicTableNamegetTableName()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.RegionServerWriter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.RegionServerWriter.html 
b/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.RegionServerWriter.html
index 60c1760..4e32530 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.RegionServerWriter.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.RegionServerWriter.html
@@ -104,7 +104,7 @@
 
 
 
-private static final class WALSplitter.RegionServerWriter
+private static final class WALSplitter.RegionServerWriter
 extends WALSplitter.SinkWriter
 Private data structure that wraps a receiving RS and collects statistics about the data
 written to this newly assigned RS.
@@ -210,7 +210,7 @@ extends 
 
 sink
-finalWALEditsReplaySink sink
+finalWALEditsReplaySink sink
 
 
 
@@ -227,7 +227,7 @@ extends 
 
 WALSplitter.RegionServerWriter
-WALSplitter.RegionServerWriter(org.apache.hadoop.conf.Configurationconf,
+WALSplitter.RegionServerWriter(org.apache.hadoop.conf.Configurationconf,
   TableNametableName,
   HConnectionconn)
  throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -249,7 +249,7 @@ extends 
 
 close
-voidclose()
+voidclose()
  throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Throws:
 http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html 
b/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html
index 76e4c8d..1ff6766 100644
--- 

[14/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html 
b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
index 515914b..131ea48 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
@@ -379,130 +379,130 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 privateHMaster m_master
 
 
-
+
 
 
 
 
-m_serverManager
-privateServerManager m_serverManager
+m_frags
+privatehttp://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer m_frags
 
 
-
+
 
 
 
 
-m_serverManager__IsNotDefault
-privateboolean m_serverManager__IsNotDefault
+m_frags__IsNotDefault
+privateboolean m_frags__IsNotDefault
 
 
-
+
 
 
 
 
-m_metaLocation
-privateServerName m_metaLocation
+m_serverManager
+privateServerManager m_serverManager
 
 
-
+
 
 
 
 
-m_metaLocation__IsNotDefault
-privateboolean m_metaLocation__IsNotDefault
+m_serverManager__IsNotDefault
+privateboolean m_serverManager__IsNotDefault
 
 
-
+
 
 
 
 
-m_frags
-privatehttp://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer m_frags
+m_filter
+privatehttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_filter
 
 
-
+
 
 
 
 
-m_frags__IsNotDefault
-privateboolean m_frags__IsNotDefault
+m_filter__IsNotDefault
+privateboolean m_filter__IsNotDefault
 
 
-
+
 
 
 
 
-m_filter
-privatehttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_filter
+m_assignmentManager
+privateAssignmentManager m_assignmentManager
 
 
-
+
 
 
 
 
-m_filter__IsNotDefault
-privateboolean m_filter__IsNotDefault
+m_assignmentManager__IsNotDefault
+privateboolean m_assignmentManager__IsNotDefault
 
 
-
+
 
 
 
 
-m_deadServers
-privatehttp://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">SetServerName m_deadServers
+m_format
+privatehttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_format
 
 
-
+
 
 
 
 
-m_deadServers__IsNotDefault
-privateboolean m_deadServers__IsNotDefault
+m_format__IsNotDefault
+privateboolean m_format__IsNotDefault
 
 
-
+
 
 
 
 
-m_format
-privatehttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_format
+m_metaLocation
+privateServerName m_metaLocation
 
 
-
+
 
 
 
 
-m_format__IsNotDefault
-privateboolean m_format__IsNotDefault
+m_metaLocation__IsNotDefault
+privateboolean m_metaLocation__IsNotDefault
 
 
-
+
 
 
 
 
-m_assignmentManager
-privateAssignmentManager m_assignmentManager
+m_catalogJanitorEnabled
+privateboolean m_catalogJanitorEnabled
 
 
-
+
 
 
 
 
-m_assignmentManager__IsNotDefault
-privateboolean m_assignmentManager__IsNotDefault
+m_catalogJanitorEnabled__IsNotDefault
+privateboolean m_catalogJanitorEnabled__IsNotDefault
 
 
 
@@ -523,22 +523,22 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 privateboolean m_servers__IsNotDefault
 
 
-
+
 
 
 
 
-m_catalogJanitorEnabled
-privateboolean m_catalogJanitorEnabled
+m_deadServers
+privatehttp://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">SetServerName m_deadServers
 
 
-
+
 
 
 
 
-m_catalogJanitorEnabled__IsNotDefault
-privateboolean m_catalogJanitorEnabled__IsNotDefault
+m_deadServers__IsNotDefault
+privateboolean m_deadServers__IsNotDefault
 
 
 
@@ -584,193 +584,193 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 publicHMastergetMaster()
 
 
-
+
 
 
 
 
-setServerManager
-publicvoidsetServerManager(ServerManagerserverManager)
+setFrags
+publicvoidsetFrags(http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 

[37/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/ipc/class-use/CallRunner.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/CallRunner.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/CallRunner.html
index f22908f..3da24c4 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/CallRunner.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/CallRunner.html
@@ -97,16 +97,57 @@
 
 
 
+private http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/LinkedBlockingDeque.html?is-external=true;
 title="class or interface in 
java.util.concurrent">LinkedBlockingDequeCallRunner
+AdaptiveLifoCoDelCallQueue.queue
+
+
 private http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in java.util.concurrent">BlockingQueueCallRunner
 RWQueueRpcExecutor.queues
 
-
+
 protected http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in java.util.concurrent">BlockingQueueCallRunner
 BalancedQueueRpcExecutor.queues
 
 
 
 
+Methods in org.apache.hadoop.hbase.ipc
 that return CallRunner
+
+Modifier and Type
+Method and Description
+
+
+
+CallRunner
+AdaptiveLifoCoDelCallQueue.element()
+
+
+CallRunner
+AdaptiveLifoCoDelCallQueue.peek()
+
+
+CallRunner
+AdaptiveLifoCoDelCallQueue.poll()
+
+
+CallRunner
+AdaptiveLifoCoDelCallQueue.poll(longtimeout,
+http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/TimeUnit.html?is-external=true;
 title="class or interface in 
java.util.concurrent">TimeUnitunit)
+
+
+CallRunner
+AdaptiveLifoCoDelCallQueue.remove()
+
+
+CallRunner
+AdaptiveLifoCoDelCallQueue.take()
+Behaves as http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/LinkedBlockingQueue.html?is-external=true#take()"
 title="class or interface in 
java.util.concurrent">LinkedBlockingQueue.take(), except it 
will silently
+ skip all calls which it thinks should be dropped.
+
+
+
+
+
 Methods in org.apache.hadoop.hbase.ipc
 that return types with arguments of type CallRunner
 
 Modifier and Type
@@ -127,6 +168,10 @@
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in java.util.concurrent">BlockingQueueCallRunner
 BalancedQueueRpcExecutor.getQueues()
 
+
+http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorCallRunner
+AdaptiveLifoCoDelCallQueue.iterator()
+
 
 
 
@@ -137,38 +182,60 @@
 
 
 
+boolean
+AdaptiveLifoCoDelCallQueue.add(CallRunnercallRunner)
+
+
 int
 SimpleRpcScheduler.CallPriorityComparator.compare(CallRunnera,
   CallRunnerb)
 
-
+
 boolean
 SimpleRpcScheduler.dispatch(CallRunnercallTask)
 
-
+
 boolean
 FifoRpcScheduler.dispatch(CallRunnertask)
 
-
+
 abstract boolean
 RpcExecutor.dispatch(CallRunnercallTask)
 Add the request to the executor queue
 
 
-
+
 boolean
 RWQueueRpcExecutor.dispatch(CallRunnercallTask)
 
-
+
 abstract boolean
 RpcScheduler.dispatch(CallRunnertask)
 Dispatches an RPC request asynchronously.
 
 
-
+
 boolean
 BalancedQueueRpcExecutor.dispatch(CallRunnercallTask)
 
+
+private boolean
+AdaptiveLifoCoDelCallQueue.needToDrop(CallRunnercallRunner)
+
+
+boolean
+AdaptiveLifoCoDelCallQueue.offer(CallRunnercallRunner)
+
+
+boolean
+AdaptiveLifoCoDelCallQueue.offer(CallRunnercallRunner,
+  longtimeout,
+  http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/TimeUnit.html?is-external=true;
 title="class or interface in 
java.util.concurrent">TimeUnitunit)
+
+
+void
+AdaptiveLifoCoDelCallQueue.put(CallRunnercallRunner)
+
 
 
 
@@ -179,10 +246,23 @@
 
 
 
+boolean
+AdaptiveLifoCoDelCallQueue.addAll(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">Collection? extends CallRunnerc)
+
+
 protected void
 RpcExecutor.consumerLoop(http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in java.util.concurrent">BlockingQueueCallRunnermyQueue)
 
+
+int
+AdaptiveLifoCoDelCallQueue.drainTo(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">Collection? super CallRunnerc)
+
 
+int
+AdaptiveLifoCoDelCallQueue.drainTo(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true;
 title="class or 
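The new AdaptiveLifoCoDelCallQueue entries above describe a call queue whose take() silently skips calls that its needToDrop() test flags. A rough sketch of a CoDel-style drop test follows; the thresholds, field names and CallStub type are illustrative assumptions, not the actual implementation.

// A rough sketch, not the real implementation, of the CoDel-style needToDrop()
// test named above: a call is dropped only while the queue is "overloaded",
// i.e. queueing delay has stayed above a target for at least one interval.
public class CoDelDropSketch {
  static final long TARGET_DELAY_MS = 100;
  static final long INTERVAL_MS = 100;

  private boolean isOverloaded = false;
  private long aboveTargetSinceMs = -1;  // -1 means delay is currently below target

  static class CallStub {
    final long enqueuedAtMs;
    CallStub(long enqueuedAtMs) { this.enqueuedAtMs = enqueuedAtMs; }
  }

  boolean needToDrop(CallStub call) {
    long now = System.currentTimeMillis();
    long queueDelayMs = now - call.enqueuedAtMs;
    if (queueDelayMs < TARGET_DELAY_MS) {
      // Delay dipped below target: clear the overload state.
      isOverloaded = false;
      aboveTargetSinceMs = -1;
      return false;
    }
    if (aboveTargetSinceMs < 0) {
      aboveTargetSinceMs = now;            // first time above target
    } else if (now - aboveTargetSinceMs > INTERVAL_MS) {
      isOverloaded = true;                 // above target for a whole interval
    }
    return isOverloaded;
  }
}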

[38/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.html
index 8bd67e3..848dc41 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.html
@@ -206,6 +206,17 @@ extends Constructor and Description
 
 
+RWQueueRpcExecutor(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
+inthandlerCount,
+intnumQueues,
+floatreadShare,
+floatscanShare,
+http://docs.oracle.com/javase/7/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in 
java.util.concurrent">BlockingQueuewriteQueueClass,
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]writeQueueInitArgs,
+http://docs.oracle.com/javase/7/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in 
java.util.concurrent">BlockingQueuereadQueueClass,
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in 
java.lang">Object[]readQueueInitArgs)
+
+
 RWQueueRpcExecutor(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
 inthandlerCount,
 intnumQueues,
@@ -213,7 +224,7 @@ extends 
 
-
+
 RWQueueRpcExecutor(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
 inthandlerCount,
 intnumQueues,
@@ -223,7 +234,7 @@ extends Abortableabortable)
 
-
+
 RWQueueRpcExecutor(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
 inthandlerCount,
 intnumQueues,
@@ -235,7 +246,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in 
java.util.concurrent">BlockingQueuereadQueueClass,
 http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in 
java.lang">Object...readQueueInitArgs)
 
-
+
 RWQueueRpcExecutor(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
 inthandlerCount,
 intnumQueues,
@@ -244,7 +255,7 @@ extends Abortableabortable)
 
-
+
 RWQueueRpcExecutor(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
 inthandlerCount,
 intnumQueues,
@@ -255,7 +266,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in 
java.util.concurrent">BlockingQueuereadQueueClass,
 http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in 
java.lang">Object...readQueueInitArgs)
 
-
+
 RWQueueRpcExecutor(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
 intwriteHandlers,
 intreadHandlers,
@@ -266,7 +277,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends 
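The RWQueueRpcExecutor constructors above take readShare and scanShare ratios plus pluggable queue classes. A sketch, under assumed semantics, of how such ratios could carve a fixed queue count into write, read and scan groups; the real RWQueueRpcExecutor arithmetic may differ in detail.

// Assumed semantics only: split numQueues into write/read/scan groups by ratio.
public class QueueSplitSketch {
  static int[] split(int numQueues, float readShare, float scanShare) {
    int readQueues = Math.max(1, Math.round(numQueues * readShare));
    int writeQueues = Math.max(1, numQueues - readQueues);
    int scanQueues = Math.min(readQueues - 1, Math.round(readQueues * scanShare));
    scanQueues = Math.max(0, scanQueues);
    readQueues -= scanQueues;
    return new int[] { writeQueues, readQueues, scanQueues };
  }

  public static void main(String[] args) {
    // 10 queues, half for reads, a fifth of the read queues reserved for scans.
    int[] q = split(10, 0.5f, 0.2f);
    System.out.printf("write=%d read=%d scan=%d%n", q[0], q[1], q[2]);
  }
}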

[09/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/util/class-use/CancelableProgressable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/class-use/CancelableProgressable.html 
b/devapidocs/org/apache/hadoop/hbase/util/class-use/CancelableProgressable.html
index d186c64..a74 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/util/class-use/CancelableProgressable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/util/class-use/CancelableProgressable.html
@@ -281,6 +281,19 @@
 
 
 Uses of CancelableProgressable 
in org.apache.hadoop.hbase.util
+
+Classes in org.apache.hadoop.hbase.util
 that implement CancelableProgressable
+
+Modifier and Type
+Class and Description
+
+
+
+(package private) static class
+FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose
+
+
+
 
 Methods in org.apache.hadoop.hbase.util
 with parameters of type CancelableProgressable
 
@@ -293,20 +306,26 @@
 FSHDFSUtils.checkIfCancelled(CancelableProgressablereporter)
 
 
+void
+FanOutOneBlockAsyncDFSOutput.recoverAndClose(CancelableProgressablereporter)
+The close method to use when an error has occurred.
+
+
+
 (package private) boolean
 FSHDFSUtils.recoverDFSFileLease(org.apache.hadoop.hdfs.DistributedFileSystemdfs,
   org.apache.hadoop.fs.Pathp,
   
org.apache.hadoop.conf.Configurationconf,
   CancelableProgressablereporter)
 
-
+
 void
 FSMapRUtils.recoverFileLease(org.apache.hadoop.fs.FileSystemfs,
 org.apache.hadoop.fs.Pathp,
 org.apache.hadoop.conf.Configurationconf,
 CancelableProgressablereporter)
 
-
+
 void
 FSHDFSUtils.recoverFileLease(org.apache.hadoop.fs.FileSystemfs,
 org.apache.hadoop.fs.Pathp,
@@ -315,7 +334,7 @@
 Recover the lease from HDFS, retrying multiple times.
 
 
-
+
 abstract void
 FSUtils.recoverFileLease(org.apache.hadoop.fs.FileSystemfs,
 org.apache.hadoop.fs.Pathp,

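The class-use page above shows CancelableProgressable threaded through lease recovery (recoverFileLease, recoverAndClose) with a CancelOnClose implementation. A small sketch of that cancel-on-progress pattern, built from stand-in types rather than the HBase classes, is:

// A sketch of the CancelableProgressable pattern used by recoverFileLease and
// recoverAndClose above: the long-running callee reports progress periodically and
// gives up once progress() returns false. The CancelOnClose-style wiring is assumed.
public class CancelOnCloseSketch {
  interface CancelableProgress {
    boolean progress();  // false means "stop, the caller no longer wants the result"
  }

  static class ClosableResource {
    volatile boolean closed;
    void close() { closed = true; }
  }

  /** Reports progress only while the resource is still open (the CancelOnClose idea). */
  static CancelableProgress cancelOnClose(final ClosableResource r) {
    return new CancelableProgress() {
      public boolean progress() {
        return !r.closed;
      }
    };
  }

  /** Retries an operation, polling the reporter between attempts. */
  static boolean recoverWithRetries(CancelableProgress reporter) throws InterruptedException {
    for (int attempt = 0; attempt < 10; attempt++) {
      if (!reporter.progress()) {
        return false;               // cancelled: stop retrying
      }
      // one lease-recovery attempt would go here; pretend it may need another pass
      Thread.sleep(100);            // back off before the next attempt
    }
    return true;
  }
}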
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/util/class-use/Counter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/class-use/Counter.html 
b/devapidocs/org/apache/hadoop/hbase/util/class-use/Counter.html
index 31b0a8c..cc78fa6 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/class-use/Counter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/Counter.html
@@ -75,6 +75,20 @@
 
 
 
+org.apache.hadoop.hbase.io.hfile
+
+Provides implementations of HFile and 
HFile
+ BlockCache.
+
+
+
+org.apache.hadoop.hbase.io.hfile.bucket
+
+Provides BucketCache, an 
implementation of
+ BlockCache.
+
+
+
 org.apache.hadoop.hbase.ipc
 
 Tools to help define network clients and servers.
@@ -84,11 +98,119 @@
 org.apache.hadoop.hbase.regionserver
 
 
+
+org.apache.hadoop.hbase.util
+
+
+
+org.apache.hadoop.metrics2.lib
+
+
 
 
 
 
 
+
+
+
+Uses of Counter in org.apache.hadoop.hbase.io.hfile
+
+Fields in org.apache.hadoop.hbase.io.hfile
 declared as Counter
+
+Modifier and Type
+Field and Description
+
+
+
+(package private) static Counter
+HFile.checksumFailures
+
+
+static Counter
+HFile.dataBlockReadCnt
+
+
+private Counter
+CacheStats.evictedBlockCount
+The total number of blocks that have been evicted
+
+
+
+private Counter
+CacheStats.evictionCount
+The number of times an eviction has occurred
+
+
+
+private Counter
+CacheStats.hitCachingCount
+The number of getBlock requests that were cache hits, but 
only from
+ requests that were set to use the block cache.
+
+
+
+private Counter
+CacheStats.hitCount
+The number of getBlock requests that were cache hits
+
+
+
+private Counter
+CacheStats.missCachingCount
+The number of getBlock requests that were cache misses, but 
only from
+ requests that were set to use the block cache.
+
+
+
+private Counter
+CacheStats.missCount
+The number of getBlock requests that were cache misses
+
+
+
+private Counter
+CacheStats.primaryEvictedBlockCount
+The total number of blocks for primary replica that have 
been evicted
+
+
+
+private Counter
+CacheStats.primaryHitCount
+The number of getBlock requests that were cache hits from 
primary replica
+
+
+
+private Counter
+CacheStats.primaryMissCount
+The number of getBlock requests for primary replica that 
were cache misses
+
+
+
+
+
+
+
+
+Uses of Counter in org.apache.hadoop.hbase.io.hfile.bucket
+
+Fields in org.apache.hadoop.hbase.io.hfile.bucket
 declared as Counter
+
+Modifier and Type
+Field and Description
+
+
+
+private Counter
+BucketCacheStats.ioHitCount
+
+
+private Counter
+BucketCacheStats.ioHitTime
+
+
+
+
 
 
 
@@ -173,10 +295,14 @@
 RSRpcServices.rpcScanRequestCount
 
 
+private static Counter
+StoreFileScanner.seekCount
+
+
 private Counter
 

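The class-use page above lists CacheStats hit, miss and eviction fields kept as Counter. A sketch of that counter-based stats pattern follows, with java.util.concurrent.atomic.LongAdder standing in for HBase's Counter and only a few representative fields.

import java.util.concurrent.atomic.LongAdder;

// Hot-path counters kept as cheap, striped adders; ratios derived only when read.
// LongAdder stands in for HBase's Counter; field names mirror the list above.
public class CacheStatsSketch {
  private final LongAdder hitCount = new LongAdder();
  private final LongAdder missCount = new LongAdder();
  private final LongAdder evictionCount = new LongAdder();

  public void hit() { hitCount.increment(); }
  public void miss() { missCount.increment(); }
  public void evicted() { evictionCount.increment(); }

  /** Hit ratio computed on demand from the raw counters. */
  public double getHitRatio() {
    long hits = hitCount.sum();
    long total = hits + missCount.sum();
    return total == 0 ? 0.0 : (double) hits / total;
  }

  public long getEvictionCount() {
    return evictionCount.sum();
  }
}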
[26/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
index 72d75ab..3406105 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
@@ -100,7 +100,7 @@
 
 
 @InterfaceAudience.Private
-public class HRegion
+public class HRegion
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements HeapSize, PropagatingConfigurationObserver, Region
 
@@ -483,9 +483,17 @@ implements recovering
 
 
+private RegionServicesForStores
+regionServicesForStores
+
+
 private boolean
 regionStatsEnabled
 
+
+private http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
+replicationScope
+
 
 private int
 rowLockWaitDuration
@@ -705,6 +713,10 @@ implements 
 
 
+void
+blockUpdates()
+
+
 boolean
 bulkLoadHFiles(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionPairbyte[],http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfamilyPaths,
 booleanassignSeqId,
@@ -712,7 +724,7 @@ implements Attempts to atomically load a group of hfiles.
 
 
-
+
 boolean
 checkAndMutate(byte[]row,
 byte[]family,
@@ -725,7 +737,7 @@ implements 
 
 
-
+
 boolean
 checkAndRowMutate(byte[]row,
   byte[]family,
@@ -738,66 +750,66 @@ implements 
 
 
-
+
 private boolean
 checkBatchOp(HRegion.BatchOperation?batchOp,
 intlastIndexExclusive,
 http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCell[]familyMaps,
 longnow)
 
-
+
 private void
 checkClassLoading()
 
-
+
 private void
 checkCompressionCodecs()
 
-
+
 private void
 checkEncryption()
 
-
+
 void
 checkFamilies(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in 
java.util">Collectionbyte[]families)
 Check the collection of families for validity.
 
 
-
+
 (package private) void
 checkFamily(byte[]family)
 
-
+
 private void
 checkMutationType(Mutationmutation,
   byte[]row)
 
-
+
 protected void
 checkReadOnly()
 
-
+
 protected void
 checkReadsEnabled()
 
-
+
 private void
 checkResources()
 
-
+
 (package private) void
 checkRow(byte[]row,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringop)
 Make sure this is a valid row for the HRegion
 
 
-
+
 byte[]
 checkSplit()
 Return the splitpoint.
 
 
-
+
 private void
 checkTargetRegion(byte[]encodedRegionName,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringexceptionMsg,
@@ -806,67 +818,67 @@ implements 
 
 
-
+
 void
 checkTimestamps(http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellfamilyMap,
   longnow)
 Check the collection of families for valid timestamps
 
 
-
+
 (package private) void
 clearSplit()
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListStoreFile
 close()
 Close down this HRegion.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListStoreFile
 close(booleanabort)
 Close down this HRegion.
 
 
-
+
 private void
 closeBulkRegionOperation()
 Closes the lock.
 
 
-
+
 void
 closeRegionOperation()
 Closes the region operation lock.
 
 
-
+
 void
 closeRegionOperation(Region.Operationoperation)
 Closes the lock.
 
 
-
+

[24/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.html
index 048adec..b395efc 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.html
@@ -138,7 +138,7 @@ implements LOG
 
 
-private 
org.apache.hadoop.metrics2.lib.MutableCounterLong
+private MutableFastCounter
 regionAppend
 
 
@@ -146,7 +146,7 @@ implements regionAppendKey
 
 
-private 
org.apache.hadoop.metrics2.lib.MutableCounterLong
+private MutableFastCounter
 regionDelete
 
 
@@ -154,7 +154,7 @@ implements regionDeleteKey
 
 
-private MutableHistogram
+private MetricHistogram
 regionGet
 
 
@@ -162,7 +162,7 @@ implements regionGetKey
 
 
-private 
org.apache.hadoop.metrics2.lib.MutableCounterLong
+private MutableFastCounter
 regionIncrement
 
 
@@ -174,7 +174,7 @@ implements regionNamePrefix
 
 
-private 
org.apache.hadoop.metrics2.lib.MutableCounterLong
+private MutableFastCounter
 regionPut
 
 
@@ -182,7 +182,7 @@ implements regionPutKey
 
 
-private MutableHistogram
+private MetricHistogram
 regionScanNext
 
 
@@ -438,7 +438,7 @@ implements 
 
 regionPut
-private finalorg.apache.hadoop.metrics2.lib.MutableCounterLong regionPut
+private finalMutableFastCounter regionPut
 
 
 
@@ -447,7 +447,7 @@ implements 
 
 regionDelete
-private finalorg.apache.hadoop.metrics2.lib.MutableCounterLong regionDelete
+private finalMutableFastCounter regionDelete
 
 
 
@@ -456,7 +456,7 @@ implements 
 
 regionIncrement
-private finalorg.apache.hadoop.metrics2.lib.MutableCounterLong regionIncrement
+private finalMutableFastCounter regionIncrement
 
 
 
@@ -465,7 +465,7 @@ implements 
 
 regionAppend
-private finalorg.apache.hadoop.metrics2.lib.MutableCounterLong regionAppend
+private finalMutableFastCounter regionAppend
 
 
 
@@ -474,7 +474,7 @@ implements 
 
 regionGet
-private finalMutableHistogram regionGet
+private finalMetricHistogram regionGet
 
 
 
@@ -483,7 +483,7 @@ implements 
 
 regionScanNext
-private finalMutableHistogram regionScanNext
+private finalMetricHistogram regionScanNext
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/Region.BulkLoadListener.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/Region.BulkLoadListener.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/Region.BulkLoadListener.html
index 9f9de16..aa09a49 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/Region.BulkLoadListener.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/Region.BulkLoadListener.html
@@ -95,7 +95,7 @@
 
 
 
-public static interface Region.BulkLoadListener
+public static interface Region.BulkLoadListener
 Listener class to enable callers of
  bulkLoadHFile() to perform any necessary
  pre/post processing of a given bulkload call
@@ -159,7 +159,7 @@
 
 
 prepareBulkLoad
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringprepareBulkLoad(byte[]family,
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringprepareBulkLoad(byte[]family,
  http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringsrcPath)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Called before an HFile is actually loaded
@@ -175,7 +175,7 @@
 
 
 doneBulkLoad
-voiddoneBulkLoad(byte[]family,
+voiddoneBulkLoad(byte[]family,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringsrcPath)
   throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Called after a successful HFile load
@@ -190,7 +190,7 @@
 
 
 failedBulkLoad
-voidfailedBulkLoad(byte[]family,
+voidfailedBulkLoad(byte[]family,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringsrcPath)
 throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Called after a failed HFile load
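Region.BulkLoadListener above exposes three hooks around bulkLoadHFile(). A hypothetical listener that stages each HFile on the local filesystem could look like the sketch below; the staging scheme and paths are purely illustrative, and a real listener would work against HDFS through the region server.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

// Hypothetical listener implementing the three hooks this page lists
// (prepareBulkLoad / doneBulkLoad / failedBulkLoad), staging each HFile locally.
public class StagingBulkLoadListenerSketch {
  /** Called before an HFile is actually loaded: return the path to load from. */
  public String prepareBulkLoad(byte[] family, String srcPath) throws IOException {
    Path staged = Paths.get(srcPath + ".staged");
    Files.copy(Paths.get(srcPath), staged, StandardCopyOption.REPLACE_EXISTING);
    return staged.toString();
  }

  /** Called after a successful HFile load: drop the staging copy. */
  public void doneBulkLoad(byte[] family, String srcPath) throws IOException {
    Files.deleteIfExists(Paths.get(srcPath + ".staged"));
  }

  /** Called after a failed HFile load: clean up so the caller can retry. */
  public void failedBulkLoad(byte[] family, String srcPath) throws IOException {
    Files.deleteIfExists(Paths.get(srcPath + ".staged"));
  }
}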


[35/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
index 451f5a0..5e8205a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
@@ -234,7 +234,7 @@ extends 
 
 Methods inherited from class org.apache.hadoop.hbase.master.HMaster
-abort,
 abortProcedure,
 addColumn,
 assignMeta,
 assignRegion,
 balance,
 balance,
 balanceSwitch, canCreateBaseZNode,
 canUpdateTableDescriptor,
 checkInitialized,
 checkServiceStarted,
 checkTableModifiable,
 configureInfoServer,
 constructMaster
 , createNamespace,
 createRpcServices,
 createServerManager,
 createTable,
 deleteColumn,
 deleteNamespace,
 deleteTable,
 disableTable,
 dispatchMergingRegions,
 doMetrics,
 enableTable,
 getAssignmentManager,
 getAverageLoad,
 getClientIdAuditPrefix,
 getClusterSchema,
 getClusterStatus,
 getDumpServlet,
 getFsTableDescriptors,
 getHFileCleaner,
 getInitializedEvent,
 getLastMajorCompactionTimestamp,
 getLastMajorCompactionTimestampForRegion,
 getLoadBalancerClassName,
 getLoadedCoprocessors,
 getMasterActiveTime,
 getMasterCoprocessorHost,
 getMasterCoprocessors,
 getMasterFileSystem,
 getMasterMetrics,
 getMasterProcedureExecutor, getMasterQuotaManager,
 getMasterRpcServices,
 getMasterStartTime,
 getMergePlanCount,
 getMobCompactionState,
 getNamespace,
 getNamespaces,
 g
 etNumWALFiles, getProcessName,
 getRegionNormalizer,
 getRegionNormalizerTracker,
 getRegionServerFatalLogBuffer,
 getRegionServerInfoPort,
 getRegionServerVersion,
 getRemoteInetAddress,
 getServerCrashProcessingEnabledEvent,
 getServerManager,
 getServerName,
 getSnapshotManagerForTesting,
 getSplitPlanCount,
 getTableDescriptors,
 getTableRegionForRow,
 getTableStateManager,
 getZooKeeper, initClusterSchemaService,
 initializeZKBasedSystemTrackers,
 initQuotaManager,
 isActiveMaster,
 isBalancerOn,
 isCatalogJanitorEnabled,
 isInitializationStartsMetaRegionAssignment,
 isInitialized, isNormalizerOn,
 isServerCrashProcessingEnabled,
 listProcedures,
 listTableDescriptors,
 listTableDescriptorsByNamespace,
 listTableNames,
 li
 stTableNamesByNamespace, login,
 main,
 modifyColumn,
 modifyNamespace,
 modifyTable,
 move,
 normalizeRegions, registerService,
 reportMobCompactionEnd,
 reportMobCompactionStart,
 requestMobCompaction,
 sendShutdownInterrupt,
 setCatalogJanitorEnabled,
 setInitialized, setServerCrashProcessingEnabled,
 shutdown,
 stopMaster,
 stopServiceThreads,
 truncateTable,
 waitForMasterActive
+abort,
 abortProcedure,
 addColumn,
 assignMeta,
 assignRegion,
 balance,
 balance,
 balanceSwitch, canCreateBaseZNode,
 canUpdateTableDescriptor,
 checkInitialized,
 checkServiceStarted,
 checkTableModifiable,
 configureInfoServer,
 constructMaster
 , createNamespace,
 createRpcServices,
 createServerManager,
 createTable,
 deleteColumn,
 deleteNamespace,
 deleteTable,
 disableTable,
 dispatchMergingRegions,
 doMetrics,
 enableTable,
 getAssignmentManager,
 getAverageLoad,
 getClientIdAuditPrefix,
 getClusterSchema,
 getClusterStatus,
 getDumpServlet,
 getFsTableDescriptors,
 getHFileCleaner,
 getInitializedEvent,
 getLastMajorCompactionTimestamp,
 getLastMajorCompactionTimestampForRegion,
 getLoadBalancerClassName,
 getLoadedCoprocessors,
 getMasterActiveTime,
 getMasterCoprocessorHost,
 getMasterCoprocessors,
 getMasterFileSystem,
 getMasterMetrics,
 getMasterProcedureExecutor, getMasterQuotaManager,
 getMasterRpcServices,
 getMasterStartTime,
 getMergePlanCount,
 getMobCompactionState,
 getNamespace,
 getNamespaces,
 g
 etNumWALFiles, getProcessName,
 getRegionNormalizer,
 getRegionNormalizerTracker,
 getRegionServerFatalLogBuffer,
 getRegionServerInfoPort,
 getRegionServerVersion,
 getRemoteInetAddress,
 getServerCrashProcessingEnabledEvent,
 getServerManager,
 getServerName,
 getSnapshotManagerForTesting,
 getSplitPlanCount,
 getTableDescriptors,
 getTableRegionForRow,
 getTableStateManager,
 getWalProcedureStore, 
getZooKeeper,
 initClusterSchemaService,
 initializeZKBasedSystemTrackers,
 initQuotaManager,
 isActiveMaster,
 isBalancerOn,
 isCatalogJanitorEnabled,
 isInitializationStartsMetaRegionAssignment, isInitialized,
 isNormalizerOn,
 isServerCrashProcessingEnabled,
 listProcedures,
 listTableDescriptors,
 listTableDescriptorsByNamespace,
 listTableNames, href="../../../../../org/apache/hadoop/hbase/master/HMaster.html#listTableNamesByNamespace(java.lang.String)">listTableNamesByNamespace,
 > href="../../../../../org/apache/hadoop/hbase/master/HMaster.html#login(org.apache.hadoop.hbase.security.UserProvider,%20java.lang.String)">login,
 > 

[51/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
Published site at c5288947ddc4abae2f4036544a775ff81538df2f.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/55dfd6fe
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/55dfd6fe
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/55dfd6fe

Branch: refs/heads/asf-site
Commit: 55dfd6fed046e99a9a0911ac08fcdd5955afccad
Parents: 89b638a
Author: jenkins 
Authored: Fri Feb 26 19:12:29 2016 +
Committer: Misty Stanley-Jones 
Committed: Fri Feb 26 13:40:29 2016 -0800

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 21091 +
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/constant-values.html|19 +
 apidocs/index-all.html  |22 +
 .../hadoop/hbase/util/FastLongHistogram.html|   170 +-
 .../hbase/util/class-use/FastLongHistogram.html |44 +-
 .../apache/hadoop/hbase/util/package-use.html   |10 +-
 .../hadoop/hbase/util/FastLongHistogram.html|   498 +-
 book.html   |99 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 16116 +++--
 checkstyle.rss  |   124 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 6 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html|16 +-
 devapidocs/allclasses-noframe.html  |16 +-
 devapidocs/constant-values.html |   206 +-
 devapidocs/deprecated-list.html | 2 +-
 devapidocs/index-all.html   |   749 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html | 8 -
 .../hadoop/hbase/class-use/HRegionInfo.html |36 +-
 .../hbase/class-use/HTableDescriptor.html   |   222 -
 .../hbase/class-use/RegionTooBusyException.html | 4 +
 .../hadoop/hbase/class-use/TableName.html   |   103 +-
 .../class-use/InterfaceAudience.Private.html|   179 +-
 .../class-use/InterfaceStability.Evolving.html  |20 +-
 .../class-use/InterfaceStability.Unstable.html  |12 +-
 .../hbase/classification/package-tree.html  | 6 +-
 .../hadoop/hbase/client/package-tree.html   | 6 +-
 .../hadoop/hbase/filter/package-tree.html   | 8 +-
 .../hadoop/hbase/io/hfile/AgeSnapshot.html  |70 +-
 ...BlockCacheUtil.CachedBlockCountsPerFile.html |24 +-
 .../BlockCacheUtil.CachedBlocksByFile.html  |62 +-
 .../hadoop/hbase/io/hfile/BlockCacheUtil.html   |34 +-
 .../hadoop/hbase/io/hfile/CacheStats.html   |   174 +-
 .../io/hfile/HFile.CachingBlockReader.html  | 6 +-
 .../hadoop/hbase/io/hfile/HFile.FileInfo.html   |74 +-
 .../hadoop/hbase/io/hfile/HFile.Reader.html |64 +-
 .../hadoop/hbase/io/hfile/HFile.Writer.html |20 +-
 .../hbase/io/hfile/HFile.WriterFactory.html |36 +-
 .../org/apache/hadoop/hbase/io/hfile/HFile.html |42 +-
 .../hbase/io/hfile/bucket/BucketCacheStats.html |26 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 4 +-
 ...Client.BlockingRpcChannelImplementation.html | 4 +-
 .../hbase/ipc/AdaptiveLifoCoDelCallQueue.html   |   977 +
 .../org/apache/hadoop/hbase/ipc/AsyncCall.html  | 4 +-
 .../hadoop/hbase/ipc/FifoRpcScheduler.html  |50 +-
 .../hbase/ipc/MetricsHBaseServerSource.html |   120 +-
 .../hbase/ipc/MetricsHBaseServerSourceImpl.html |86 +-
 .../hbase/ipc/MetricsHBaseServerWrapper.html|28 +-
 .../ipc/MetricsHBaseServerWrapperImpl.html  |36 +-
 .../hadoop/hbase/ipc/RWQueueRpcExecutor.html|60 +-
 .../apache/hadoop/hbase/ipc/RpcScheduler.html   |40 +-
 ...mpleRpcScheduler.CallPriorityComparator.html |12 +-
 .../hadoop/hbase/ipc/SimpleRpcScheduler.html|   246 +-
 .../class-use/AdaptiveLifoCoDelCallQueue.html   |   115 +
 .../hadoop/hbase/ipc/class-use/CallRunner.html  |94 +-
 .../apache/hadoop/hbase/ipc/package-frame.html  | 1 +
 .../hadoop/hbase/ipc/package-summary.html   |90 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 1 +
 .../hadoop/hbase/mapreduce/package-tree.html| 4 +-
 .../org/apache/hadoop/hbase/master/HMaster.html |   217 +-
 .../master/HMasterCommandLine.LocalHMaster.html | 2 +-
 .../master/MetricsAssignmentManagerSource.html  | 2 +-
 .../MetricsAssignmentManagerSourceImpl.html |34 +-
 .../master/MetricsMasterFileSystemSource.html   | 2 +-
 

[12/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.Callback.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.Callback.html
 
b/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.Callback.html
new file mode 100644
index 000..33ca5e8
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.Callback.html
@@ -0,0 +1,298 @@
New generated Javadoc page for
org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutput.Callback
(Apache HBase 2.0.0-SNAPSHOT API). Recoverable content of the page:

  private static final class FanOutOneBlockAsyncDFSOutput.Callback
      extends java.lang.Object
  Enclosing class: FanOutOneBlockAsyncDFSOutput

  Fields:
    long ackedLength
    io.netty.util.concurrent.Promise<Void> promise
    Set<io.netty.channel.Channel> unfinishedReplicas

  Constructor:
    FanOutOneBlockAsyncDFSOutput.Callback(io.netty.util.concurrent.Promise<Void> promise,
        long ackedLength, Collection<io.netty.channel.Channel> replicas)

  Methods: only those inherited from java.lang.Object.

[33/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/metrics/BaseSourceImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/metrics/BaseSourceImpl.html 
b/devapidocs/org/apache/hadoop/hbase/metrics/BaseSourceImpl.html
index 34c25eb..5d2d68a 100644
--- a/devapidocs/org/apache/hadoop/hbase/metrics/BaseSourceImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/metrics/BaseSourceImpl.html
Regenerated Javadoc for org.apache.hadoop.hbase.metrics.BaseSourceImpl.

  @InterfaceAudience.Private
  public class BaseSourceImpl extends java.lang.Object
      implements BaseSource, org.apache.hadoop.metrics2.MetricsSource
  Hadoop 2 implementation of BaseSource (using the metrics2 framework). It handles
  registration to ...

  Changes in this revision:
    - The method updateQuantile(String name, long value) ("Add some value to a
      Quantile (an accurate histogram).") was removed from the method summary.
    - Anchors were regenerated, with signatures unchanged, for the fields
      metricsRegistry (DynamicMetricsRegistry), metricsName, metricsDescription,
      metricsContext and metricsJmxContext (String); the constructor
      BaseSourceImpl(String metricsName, String metricsDescription,
      String metricsContext, String metricsJmxContext); and the methods init()
      ("Clear out the metrics and re-prepare the source."),
      setGauge(String gaugeName, long value) ("Set a single gauge to a value."),
      incGauge(String gaugeName, long delta) ("Add some amount to a gauge.") and
      decGauge(String gaugeName, ...).

[18/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html
index eceda63..142026c 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html
Regenerated Javadoc for org.apache.hadoop.hbase.regionserver.wal.FSWALEntry.

  @InterfaceAudience.Private
  class FSWALEntry extends WAL.Entry
  A WAL Entry for the FSHLog implementation. Immutable. A subclass of WAL.Entry that
  carries extra info across the ring buffer such as ...

  Changes in this revision:
    - Removed: the field "private HTableDescriptor htd", the constructor parameter
      "HTableDescriptor htd", and the package-private method getHTableDescriptor()
      (from both the method summary and the method detail).
    - The constructor is now FSWALEntry(long sequence, WALKey key, WALEdit edit,
      HRegionInfo hri, boolean inMemstore).
    - Unchanged members (anchors only regenerated): fields sequence, inMemstore, hri
      and familyNames (Set<byte[]>); methods toString(), isInMemstore(),
      getHRegionInfo(), getSequence() ("The sequence on the ring buffer when this edit
      was added."), stampRegionSequenceId() ("Here is where a WAL edit gets its
      sequenceid. SIDE-EFFECT is our stamping the sequenceid into every Cell AND
      setting the sequenceid into the ..."), and getFamilyNames() ("the family names
      which are affected by this edit").
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/wal/HLogKey.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/HLogKey.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/HLogKey.html
index 2a4f41d..a306236 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/HLogKey.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/HLogKey.html
@@ -113,7 +113,7 @@
 
 @InterfaceAudience.LimitedPrivate(value="Replication")
 

[19/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSHLog.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSHLog.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSHLog.html
index 1b44a8a..b0c526a 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSHLog.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSHLog.html
Regenerated Javadoc for org.apache.hadoop.hbase.regionserver.wal.FSHLog.

  @InterfaceAudience.Private
  public class FSHLog extends java.lang.Object implements WAL
  Implementation of WAL to go against FileSystem; i.e. keep WALs in HDFS.

  Changes in this revision:
    - The method summary entry for append now reads
      long append(HRegionInfo hri, WALKey key, WALEdit edits, boolean inMemstore);
      the HTableDescriptor htd parameter was dropped.
    - Anchors were regenerated, with signatures unchanged, for the fields:
        LOG, DEFAULT_SLOW_SYNC_TIME_MS,
        disruptor (Disruptor<RingBufferTruck>) -- "The nexus at which all incoming
          handlers meet. Does appends and sync with an ordering. Appends and syncs are
          each put on the ring which means handlers need to smash up against the ring
          twice (can we make it once only? ... maybe not since time to append ...",
        appendExecutor (ExecutorService) -- "An executorservice that runs the disruptor
          AppendEventHandler append executor.",
        ringBufferEventHandler -- "This fellow is run by the above appendExecutor
          service but it is all about batching up appends and syncs; it may shutdown
          without cleaning out the last few appends or syncs. To guard against this,
          keep a reference to this handler and do explicit close on way out to make
          sure ...",
        syncFuturesByHandler (Map<Thread,SyncFuture>) -- "Map of SyncFutures keyed by
          Handler objects. Used so we reuse SyncFutures. TODO: Reuse FSWALEntry's
          rather than create them anew each time as we do SyncFutures here. TODO: Add a
          FSWalEntry and SyncFuture as thread locals on handlers rather than have
          them ...",
        highestUnsyncedSequence -- "The highest known outstanding unsync'd WALEdit
          sequence number where sequence number is the ring buffer sequence. Maintained
          by the ring buffer consumer.",
        highestSyncedSequence (AtomicLong) -- "Updated to the ring buffer sequence of
          the last successful sync call. This can be less than highestUnsyncedSequence
          for case where we have an append where a sync has not yet come in for it.
          Maintained by the syncing threads.",
        fs -- "file system instance".
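
A hedged sketch of the append contract after this change: the caller hands the WAL an
HRegionInfo, WALKey and WALEdit (no HTableDescriptor any more), gets back the
ring-buffer transaction id, and syncs on it. How key and edit are populated is outside
this diff and left to the caller; the class name is invented.

    import java.io.IOException;
    import org.apache.hadoop.hbase.HRegionInfo;
    import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
    import org.apache.hadoop.hbase.wal.WAL;
    import org.apache.hadoop.hbase.wal.WALKey;

    /** Sketch only; not part of this commit. */
    final class WalAppendSketch {
      static long appendAndSync(WAL wal, HRegionInfo hri, WALKey key, WALEdit edit)
          throws IOException {
        long txid = wal.append(hri, key, edit, true /* inMemstore */);
        wal.sync(txid);   // block until the ring buffer consumer has made the edit durable
        return txid;
      }
    }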

[50/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index a2cadbc..52d7640 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
The regenerated apache_hbase_reference_guide.pdf updates its document metadata and page
tree: /CreationDate and /ModDate change from D:20160224151227+00'00' to
D:20160226190225+00'00', the catalog's /Outlines reference moves from object 3940 to
3947 and /PageLabels from 4142 to 4149, and the /Pages object (previously /Count 648
with its /Kids array of page object references) is rewritten; the rest of this binary
diff is truncated in this message.

[16/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
index 07b2391..a298332 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.html
Changes to MetricsReplicationGlobalSourceSource: the counter fields switched from
org.apache.hadoop.metrics2.lib.MutableCounterLong to MutableFastCounter in both the
field summary and the field detail:

  logEditsFilteredCounter, logReadInBytesCounter, logReadInEditsCounter,
  shippedBatchesCounter, shippedHFilesCounter, shippedKBsCounter, shippedOpsCounter.

Unchanged context rows: ageOfLastShippedOpGauge and rms.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSinkSourceImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSinkSourceImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSinkSourceImpl.html
index 0e55784..c073542 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSinkSourceImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSinkSourceImpl.html
Changes to MetricsReplicationSinkSourceImpl: the same MutableCounterLong to
MutableFastCounter switch for batchesCounter, hfilesCounter and opsCounter
(ageGauge unchanged).
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSource.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSource.html
 

[25/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
index 436c297..3205ae9 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
Regenerated Javadoc for org.apache.hadoop.hbase.regionserver.HStore (method summary).

  Changes in this revision:
    - A new method was added to the summary:
        void finalizeFlush() -- "This method is called when it is clear that the flush
        to disk is completed."
    - The remaining rows were only renumbered; among them (signatures unchanged):
        determineTTLFromFamily(HColumnDescriptor family),
        finishCompactionRequest(CompactionRequest cr),
        flushCache(long logCacheFlushId, MemStoreSnapshot snapshot, ...) -- "Write out
          current snapshot.",
        getAvgStoreFileAge(),
        getBlockingFileCount() -- "The number of files required before flushes for this
          store will be blocked.",
        getBytesPerChecksum(Configuration conf), getCacheConfig() -- "Used for tests.",
        getChecksumType(Configuration conf), getCloseCheckInterval(),
        getColumnFamilyName(), getCompactedCellsCount(), getCompactedCellsSize(),
        getCompactionCheckMultiplier(),
        getCompactionPressure() -- "This value can represent the degree of emergency of
          compaction for this store.",
        getCompactionProgress(), getCompactPriority(), getComparator(),
        getCoprocessorHost(), getDataBlockEncoder(), getFamily(), getFileSystem(),
        getFlushableSize(), getFlushedCellsCount(), getFlushedCellsSize(), getHRegion(),
        getLastCompactSize(), getMajorCompactedCellsCount(),
        getMajorCompactedCellsSize(), getMaxMemstoreTS(), getMaxSequenceId(),
        getMaxStoreFileAge(), getMemstoreFlushSize(), getMemStoreSize(),
        getMinStoreFileAge(), getNumHFiles(), getNumReferenceFiles(), getOffPeakHours(),
        getRegionFileSystem(), getRegionInfo(), getScanInfo(),
        getScanner(Scan scan, NavigableSet<byte[]> targetCols, ...) -- "Return a scanner
          for both the memstore and the HStore files.",
        getScanners(...), getSize(), getSmallestReadPoint(),
        getSnapshotSize() -- "Returns the memstore snapshot size",
        getSplitPoint() -- "Determines if Store should be split",
        getStoreEngine() -- "Returns the StoreEngine that is backing this concrete
          implementation of Store.",
        getStorefiles(), getStorefilesCount(), getStorefilesIndexSize(),
        getStorefilesSize(), getStoreFileTtl(),
        getStoreHomedir(Path tabledir, HRegionInfo hri, ...) (deprecated) and
        getStoreHomedir(Path tabledir, String ...).

[45/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 581145a..c211c28 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
Changes to the "uses of Cell" table: the row for

  private static void Replication.scopeBulkLoadEdits(HTableDescriptor htd,
      ReplicationSourceManager replicationManager,
      NavigableMap<byte[],Integer> scopes, TableName tableName, Cell cell)

was removed (context row: ReplicationSink.isNewRowOrType(Cell previousCell, Cell cell)).
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index 892f8b8..49becba 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
Changes to the "uses of HRegionInfo" table: method rows updated to drop the
HTableDescriptor parameter in favour of a replication-scope map:

  long FSHLog.append(HRegionInfo hri, WALKey key, WALEdit edits, boolean inMemstore)
  static WALKey WALUtil.doFullAppendTransaction(WAL wal,
      NavigableMap<byte[],Integer> replicationScope, HRegionInfo hri, WALEdit edit,
      MultiVersionConcurrencyControl mvcc, ...)
  static WALKey WALUtil.writeBulkLoadMarkerAndSync(WAL wal,
      NavigableMap<byte[],Integer> replicationScope, HRegionInfo hri,
      WALProtos.BulkLoadDescriptor desc, MultiVersionConcurrencyControl mvcc)
  static WALKey WALUtil.writeCompactionMarker(WAL wal,
      NavigableMap<byte[],Integer> replicationScope, HRegionInfo hri,
      WALProtos.CompactionDescriptor c, MultiVersionConcurrencyControl mvcc)
  static WALKey WALUtil.writeFlushMarker(WAL wal,
      NavigableMap<byte[],Integer> replicationScope, HRegionInfo hri, ...)

[22/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.html
new file mode 100644
index 000..c931004
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServicesForStores.html
@@ -0,0 +1,325 @@
New generated Javadoc page for
org.apache.hadoop.hbase.regionserver.RegionServicesForStores
(Apache HBase 2.0.0-SNAPSHOT API). Recoverable content of the page:

  @InterfaceAudience.Private
  @InterfaceStability.Evolving
  public class RegionServicesForStores extends java.lang.Object
  Services a Store needs from a Region. RegionServicesForStores is the interface
  through which the memstore accesses services at the region level. For example, when
  using alternative memory formats or due to compaction the memstore needs to take an
  occasional lock and update size counters at the region level.

  Field:       private final HRegion region
  Constructor: RegionServicesForStores(HRegion region)
  Methods:     long addAndGetGlobalMemstoreSize(long size)
               void blockUpdates()
               void unblockUpdates()
               (plus methods inherited from java.lang.Object)
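
A minimal sketch of how a memstore or compaction component might use this facade, based
only on the three methods listed above. Whether blockUpdates/unblockUpdates are actually
paired around the size update like this is an assumption, and the class name is invented.

    import org.apache.hadoop.hbase.regionserver.RegionServicesForStores;

    /** Sketch only; not part of this commit. */
    final class RegionServicesSketch {
      static long adjustMemstoreSize(RegionServicesForStores services, long delta) {
        services.blockUpdates();                           // briefly stop region updates
        try {
          return services.addAndGetGlobalMemstoreSize(delta);
        } finally {
          services.unblockUpdates();
        }
      }
    }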

[07/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/wal/DisabledWALProvider.DisabledWAL.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/wal/DisabledWALProvider.DisabledWAL.html 
b/devapidocs/org/apache/hadoop/hbase/wal/DisabledWALProvider.DisabledWAL.html
index db0016d..aaf42c7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/wal/DisabledWALProvider.DisabledWAL.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/wal/DisabledWALProvider.DisabledWAL.html
Regenerated Javadoc for org.apache.hadoop.hbase.wal.DisabledWALProvider.DisabledWAL.

  private static class DisabledWALProvider.DisabledWAL extends java.lang.Object
      implements WAL

  Changes in this revision:
    - The method summary entry for append now reads
      long append(HRegionInfo info, WALKey key, WALEdit edits, boolean inMemstore);
      the HTableDescriptor htd parameter was dropped, and the parameter note
      "htd - used to give scope for replication TODO refactor out in favor of table
      name and ..." was removed from the method detail.
    - The inherited description for append reads: "Append a set of edits to the WAL.
      The WAL is not flushed/sync'd after this transaction completes BUT on return this
      edit must have its region edit/sequence id assigned else it messes up our
      unification of mvcc and sequenceid. On return key will have the region
      edit/sequence id filled in."
    - Anchors were regenerated, unchanged, for the fields listeners
      (List<WALActionsListener>), path, coprocessorHost and closed (AtomicBoolean); the
      constructor DisabledWAL(Path path, Configuration conf,
      List<WALActionsListener> listeners); and the methods
      registerWALActionsListener(WALActionsListener listener) ("Registers
      WALActionsListener"), unregisterWALActionsListener(WALActionsListener listener)
      ("Unregisters WALActionsListener"), rollWriter() and rollWriter(boolean force)
      ("Roll the log writer. That is, start writing log messages to a new file."),
      shutdown() ("Stop accepting new writes. If we have unsynced writes still in
      buffer, sync them. Extant edits are left in place in backing storage to be
      replayed later.") and close() ("Caller no longer needs any edits from this WAL.
      Implementers are free to reclaim underlying resources after this call; i.e.
      filesystem based WALs can archive or ...").
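
For context, a hedged sketch of the listener registration calls documented above.
WALActionsListener.Base as a no-op base class is an assumption, the helper name is
invented, and a real listener would override the callbacks it cares about (for example
log-roll notifications).

    import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
    import org.apache.hadoop.hbase.wal.WAL;

    /** Sketch only; not part of this commit. */
    final class WalListenerSketch {
      static WALActionsListener attach(WAL wal) {
        WALActionsListener listener = new WALActionsListener.Base() { };  // no-op listener
        wal.registerWALActionsListener(listener);
        return listener;   // later: wal.unregisterWALActionsListener(listener)
      }
    }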

[30/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
index 14fb625..73c8a1c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
Regenerated Javadoc for org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore.

  @InterfaceAudience.Private
  @InterfaceStability.Evolving
  public class WALProcedureStore extends ProcedureStoreBase
  WAL implementation of the ProcedureStore.

  Changes in this revision:
    - New nested class: static class WALProcedureStore.SyncMetrics (alongside the
      existing private static class WALProcedureStore.PushType).
    - New fields: private static int DEFAULT_SYNC_STATS_COUNT,
      private static String STORE_WAL_SYNC_STATS_COUNT, and
      private org.apache.commons.collections.buffer.CircularFifoBuffer syncMetricsBuffer.
    - The remaining field summary rows were only renumbered, among them:
      DEFAULT_SYNC_WAIT_MSEC, DEFAULT_USE_HSYNC, DEFAULT_WAIT_BEFORE_ROLL, flushLogId,
      fs, inSync, lastRollTs, leaseRecovery, loading, lock, LOG, logDir, logs,
      MAX_RETRIES_BEFORE_ROLL_CONF_KEY, MAX_SYNC_FAILURE_ROLL_CONF_KEY,
      maxRetriesBeforeRoll, maxSyncFailureRoll, PERIODIC_ROLL_CONF_KEY,
      periodicRollMsec, ROLL_RETRIES_CONF_KEY, ROLL_THRESHOLD_CONF_KEY, rollRetries,
      rollThreshold, slotCond, slotIndex, slots, slotsCache, storeTracker,
      syncException, syncThread, syncWaitMsec, totalSynced, USE_HSYNC_CONF_KEY,
      useHsync.

[10/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.html
 
b/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.html
new file mode 100644
index 000..5ad1c3b
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.html
@@ -0,0 +1,715 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+FanOutOneBlockAsyncDFSOutputHelper (Apache HBase 2.0.0-SNAPSHOT 
API)
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev 
Class
+Next 
Class
+
+
+Frames
+No 
Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.util
+Class 
FanOutOneBlockAsyncDFSOutputHelper
+
+
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper
+
+
+
+
+
+
+
+
+@InterfaceAudience.Private
+public class FanOutOneBlockAsyncDFSOutputHelper
+extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+Helper class for implementing FanOutOneBlockAsyncDFSOutput.
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+Nested Classes
+
+Modifier and Type
+Class and Description
+
+
+(package private) static class
+FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose
+
+
+private static interface
+FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor
+
+
+private static interface
+FanOutOneBlockAsyncDFSOutputHelper.FileCreater
+
+
+private static interface
+FanOutOneBlockAsyncDFSOutputHelper.LeaseManager
+
+
+private static interface
+FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter
+
+
+private static interface
+FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private static 
io.netty.buffer.ByteBufAllocator
+ALLOC
+
+
+private static http://docs.oracle.com/javase/7/docs/api/java/lang/reflect/Method.html?is-external=true;
 title="class or interface in java.lang.reflect">Method
+CREATE_CHECKSUM
+
+
+private static FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor
+DFS_CLIENT_ADAPTOR
+
+
+private static FanOutOneBlockAsyncDFSOutputHelper.FileCreater
+FILE_CREATER
+
+
+static long
+HEART_BEAT_SEQNO
+
+
+private static FanOutOneBlockAsyncDFSOutputHelper.LeaseManager
+LEASE_MANAGER
+
+
+private static 
org.apache.commons.logging.Log
+LOG
+
+
+private static FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter
+PIPELINE_ACK_STATUS_GETTER
+
+
+private static FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter
+STORAGE_TYPE_SETTER
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Modifier
+Constructor and Description
+
+
+private 
+FanOutOneBlockAsyncDFSOutputHelper()
+
+
+
+
+
+
+
+
+
+Method Summary
+
+Methods
+
+Modifier and Type
+Method and Description
+
+
+(package private) static void
+beginFileLease(org.apache.hadoop.hdfs.DFSClientclient,
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringsrc,
+longinodeId)
+
+
+(package private) static void
+completeFile(org.apache.hadoop.hdfs.DFSClientclient,
+
org.apache.hadoop.hdfs.protocol.ClientProtocolnamenode,
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringsrc,
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringclientName,
+
org.apache.hadoop.hdfs.protocol.ExtendedBlockblock,
+longfileId)
+
+
+private static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listio.netty.util.concurrent.Futureio.netty.channel.Channel
+connectToDataNodes(org.apache.hadoop.conf.Configurationconf,
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or 

[21/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServicesForStores.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServicesForStores.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServicesForStores.html
new file mode 100644
index 000..c8f70f6
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServicesForStores.html
@@ -0,0 +1,172 @@
+Uses of Class 
org.apache.hadoop.hbase.regionserver.RegionServicesForStores (Apache HBase 
2.0.0-SNAPSHOT API)
+Uses of 
Classorg.apache.hadoop.hbase.regionserver.RegionServicesForStores
+
+
+
+
+
+Packages that use RegionServicesForStores
+
+Package
+Description
+
+
+
+org.apache.hadoop.hbase.regionserver
+
+
+
+
+
+
+
+
+
+
+Uses of RegionServicesForStores in org.apache.hadoop.hbase.regionserver
+
+Fields in org.apache.hadoop.hbase.regionserver
 declared as RegionServicesForStores
+
+Modifier and Type
+Field and Description
+
+
+
+private RegionServicesForStores
+HRegion.regionServicesForStores
+
+
+
+
+Methods in org.apache.hadoop.hbase.regionserver
 that return RegionServicesForStores
+
+Modifier and Type
+Method and Description
+
+
+
+RegionServicesForStores
+HRegion.getRegionServicesForStores()
+
+
+RegionServicesForStores
+Region.getRegionServicesForStores()
+
+
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html
index db185ea..a6b156f 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html
@@ -171,6 +171,7 @@
 RegionServerCoprocessorHost.RegionServerEnvironment
 RegionServerServices.PostOpenDeployContext
 RegionServerServices.RegionStateTransitionContext
+RegionServicesForStores
 RegionSplitPolicy
 ReversedKeyValueHeap
 ReversedKeyValueHeap.ReversedKVScannerComparator

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
index 326001b..4a03ff1 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
@@ -1025,92 +1025,98 @@
 
 
 
+RegionServicesForStores
+
+Services a Store needs from a Region.
+
+
+
 RegionSplitPolicy
 
 A split policy determines when a region should be 
split.
 
 
-
+
 ReversedKeyValueHeap
 
 ReversedKeyValueHeap is used for supporting reversed 
scanning.
 
 
-
+
 ReversedKeyValueHeap.ReversedKVScannerComparator
 
 In ReversedKVScannerComparator, we compare the row of 
scanners' peek values
  first, sort bigger one before the smaller one.
 
 
-
+
 ReversedMobStoreScanner
 
 ReversedMobStoreScanner extends from ReversedStoreScanner, 
and is used to support
  reversed scanning in both the memstore and the MOB store.
 
 
-
+
 ReversedRegionScannerImpl
 
 ReversibleRegionScannerImpl extends from RegionScannerImpl, 
and is used to
  support reversed scanning.
 
 
-
+
 ReversedStoreScanner
 
 ReversedStoreScanner extends from StoreScanner, and is used 
to support
  reversed scanning.
 
 
-
+
 RSDumpServlet
 
 
-
+
 RSRpcServices
 
 Implements the regionserver RPC 
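Taken together, the hunks above show HRegion holding a RegionServicesForStores field, Region and HRegion exposing getRegionServicesForStores(), and the package summary describing the class as "Services a Store needs from a Region." A minimal, hedged access sketch follows; the facade's own methods are not shown in this diff, so none are called here.

import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionServicesForStores;

public class StoreRegionServicesSketch {
  /** A store-side component would hold the narrow facade rather than the whole Region. */
  static RegionServicesForStores servicesFor(Region region) {
    return region.getRegionServicesForStores();
  }
}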

[39/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.html
index 060b1be..76fe747 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.html
@@ -217,6 +217,22 @@ extends 
 
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+NUM_GENERAL_CALLS_DROPPED_DESC
+
+
+static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+NUM_GENERAL_CALLS_DROPPED_NAME
+
+
+static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+NUM_LIFO_MODE_SWITCHES_DESC
+
+
+static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+NUM_LIFO_MODE_SWITCHES_NAME
+
+
+static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 NUM_OPEN_CONNECTIONS_DESC
 
 
@@ -413,7 +429,7 @@ extends 
 
 Methods inherited from interfaceorg.apache.hadoop.hbase.metrics.BaseSource
-decGauge,
 getMetricsContext,
 getMetricsDescription,
 getMetricsJmxContext,
 getMetricsName,
 incCounters,
 incGauge,
 init,
 removeMetric, setGauge,
 updateHistogram,
 updateQuantile
+decGauge,
 getMetricsContext,
 getMetricsDescription,
 getMetricsJmxContext,
 getMetricsName,
 incCounters,
 incGauge,
 init,
 removeMetric, setGauge,
 updateHistogram
 
 
 
@@ -789,13 +805,53 @@ extends See Also:Constant
 Field Values
 
 
+
+
+
+
+
+NUM_GENERAL_CALLS_DROPPED_NAME
+static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String NUM_GENERAL_CALLS_DROPPED_NAME
+See Also:Constant
 Field Values
+
+
+
+
+
+
+
+NUM_GENERAL_CALLS_DROPPED_DESC
+static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String NUM_GENERAL_CALLS_DROPPED_DESC
+See Also:Constant
 Field Values
+
+
+
+
+
+
+
+NUM_LIFO_MODE_SWITCHES_NAME
+static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String NUM_LIFO_MODE_SWITCHES_NAME
+See Also:Constant
 Field Values
+
+
+
+
+
+
+
+NUM_LIFO_MODE_SWITCHES_DESC
+static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String NUM_LIFO_MODE_SWITCHES_DESC
+See Also:Constant
 Field Values
+
+
 
 
 
 
 
 EXCEPTIONS_NAME
-static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String EXCEPTIONS_NAME
+static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String EXCEPTIONS_NAME
 See Also:Constant
 Field Values
 
 
@@ -805,7 +861,7 @@ extends 
 
 EXCEPTIONS_DESC
-static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String EXCEPTIONS_DESC
+static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String EXCEPTIONS_DESC
 See Also:Constant
 Field Values
 
 
@@ -815,7 +871,7 @@ extends 
 
 EXCEPTIONS_TYPE_DESC
-static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String EXCEPTIONS_TYPE_DESC
+static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String EXCEPTIONS_TYPE_DESC
 See Also:Constant
 Field Values
 
 
@@ -825,7 +881,7 @@ extends 
 
 EXCEPTIONS_OOO_NAME
-static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String EXCEPTIONS_OOO_NAME
+static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String EXCEPTIONS_OOO_NAME
 See Also:Constant
 Field Values
 
 
@@ -835,7 +891,7 @@ extends 
 
 EXCEPTIONS_BUSY_NAME
-static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String EXCEPTIONS_BUSY_NAME
+static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String EXCEPTIONS_BUSY_NAME
 See Also:Constant
 Field Values
 
 
@@ -845,7 +901,7 
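The new NUM_GENERAL_CALLS_DROPPED_NAME/DESC and NUM_LIFO_MODE_SWITCHES_NAME/DESC constants above are metric name and description pairs, presumably bumped when the call queue sheds a request or flips into LIFO serving. The diff does not show the concrete metrics wiring, so the sketch below uses a plain LongAdder map as a stand-in registry; the key strings are assumptions, not the constants' real values.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.LongAdder;

/** Toy counter registry standing in for the metrics2-backed server source. */
public final class CallQueueCountersSketch {
  static final String NUM_GENERAL_CALLS_DROPPED = "numGeneralCallsDropped"; // assumed value
  static final String NUM_LIFO_MODE_SWITCHES = "numLifoModeSwitches";       // assumed value

  private final Map<String, LongAdder> counters = new ConcurrentHashMap<>();

  void inc(String name) {
    counters.computeIfAbsent(name, k -> new LongAdder()).increment();
  }

  long get(String name) {
    LongAdder a = counters.get(name);
    return a == null ? 0L : a.sum();
  }

  public static void main(String[] args) {
    CallQueueCountersSketch source = new CallQueueCountersSketch();
    source.inc(NUM_GENERAL_CALLS_DROPPED); // a call was shed under overload
    source.inc(NUM_LIFO_MODE_SWITCHES);    // the queue switched to LIFO serving
    System.out.println(source.get(NUM_GENERAL_CALLS_DROPPED)); // prints 1
  }
}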

[27/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
index a7e67ee..a5ff66d 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
@@ -99,7 +99,7 @@
 
 
 
-static class HRegion.WriteState
+static class HRegion.WriteState
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -225,7 +225,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 flushing
-volatileboolean flushing
+volatileboolean flushing
 
 
 
@@ -234,7 +234,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 flushRequested
-volatileboolean flushRequested
+volatileboolean flushRequested
 
 
 
@@ -243,7 +243,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 compacting
-http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger compacting
+http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger compacting
 
 
 
@@ -252,7 +252,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 writesEnabled
-volatileboolean writesEnabled
+volatileboolean writesEnabled
 
 
 
@@ -261,7 +261,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 readOnly
-volatileboolean readOnly
+volatileboolean readOnly
 
 
 
@@ -270,7 +270,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 readsEnabled
-volatileboolean readsEnabled
+volatileboolean readsEnabled
 
 
 
@@ -279,7 +279,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 HEAP_SIZE
-static finallong HEAP_SIZE
+static finallong HEAP_SIZE
 
 
 
@@ -296,7 +296,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 HRegion.WriteState
-HRegion.WriteState()
+HRegion.WriteState()
 
 
 
@@ -313,7 +313,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 setReadOnly
-voidsetReadOnly(booleanonOff)
+voidsetReadOnly(booleanonOff)
 Set flags that make this region read-only.
 Parameters:onOff - 
flip value for region r/o setting
 
@@ -324,7 +324,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 isReadOnly
-booleanisReadOnly()
+booleanisReadOnly()
 
 
 
@@ -333,7 +333,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 isFlushRequested
-booleanisFlushRequested()
+booleanisFlushRequested()
 
 
 
@@ -342,7 +342,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 setReadsEnabled
-voidsetReadsEnabled(booleanreadsEnabled)
+voidsetReadsEnabled(booleanreadsEnabled)
 
 
 



[36/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
index 97c52fa..5b64385 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
@@ -837,78 +837,82 @@ implements getTableStateManager()
 
 
+WALProcedureStore
+getWalProcedureStore()
+
+
 ZooKeeperWatcher
 getZooKeeper()
 Gets the ZooKeeper instance for this server.
 
 
-
+
 (package private) void
 initClusterSchemaService()
 
-
+
 (package private) void
 initializeZKBasedSystemTrackers()
 Initialize all ZK based system trackers.
 
 
-
+
 (package private) void
 initQuotaManager()
 
-
+
 boolean
 isActiveMaster()
 Report whether this master is currently the active master 
or not.
 
 
-
+
 boolean
 isBalancerOn()
 Queries the state of the LoadBalancerTracker.
 
 
-
+
 (package private) boolean
 isCatalogJanitorEnabled()
 
-
+
 private static boolean
 isCatalogTable(TableNametableName)
 
-
+
 boolean
 isInitializationStartsMetaRegionAssignment()
 Report whether this master has started initialization and 
is about to do meta region assignment
 
 
-
+
 boolean
 isInitialized()
 Report whether this master has completed with its 
initialization and is
  ready.
 
 
-
+
 boolean
 isNormalizerOn()
 Queries the state of the RegionNormalizerTracker.
 
 
-
+
 boolean
 isServerCrashProcessingEnabled()
 ServerCrashProcessingEnabled is set false before completing 
assignMeta to prevent processing
  of crashed servers.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListProcedureInfo
 listProcedures()
 List procedures
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHTableDescriptor
 listTableDescriptors(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringnamespace,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex,
@@ -917,13 +921,13 @@ implements Returns the list of table descriptors that match the 
specified request
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHTableDescriptor
 listTableDescriptorsByNamespace(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Get list of table descriptors by namespace
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableName
 listTableNames(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringnamespace,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex,
@@ -931,24 +935,24 @@ implements Returns the list of table names that match the specified 
request
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableName
 listTableNamesByNamespace(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Get list of table names by namespace
 
 
-
+
 protected void
 login(UserProvideruser,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringhost)
 For compatibility, if failed with regionserver credentials, 
try the master one
 
 
-
+
 static void
 main(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">String[]args)
 
-
+
 long
 modifyColumn(TableNametableName,
 HColumnDescriptordescriptor,
@@ -957,7 +961,7 @@ implements Modify the column descriptor of an existing column in an 
existing table
 
 
-
+
 (package private) long
 modifyNamespace(NamespaceDescriptornamespaceDescriptor,
   longnonceGroup,
@@ -965,7 +969,7 @@ implements Modify an existing Namespace.
 
 
-
+
 long
 modifyTable(TableNametableName,
   HTableDescriptordescriptor,
@@ -974,37 +978,37 @@ implements Modify the descriptor of an existing table
 
 
-
+
 (package private) void
 move(byte[]encodedRegionName,
 byte[]destServerName)
 
-
+
 boolean
 normalizeRegions()
 Perform normalization of cluster (invoked by RegionNormalizerChore).
 
 
-
+
 private int
 

[13/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmplImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmplImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmplImpl.html
index 692c593..2f2e988 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmplImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheViewTmplImpl.html
@@ -104,7 +104,7 @@
 
 
 
-public class BlockCacheViewTmplImpl
+public class BlockCacheViewTmplImpl
 extends org.jamon.AbstractTemplateImpl
 implements BlockCacheViewTmpl.Intf
 
@@ -222,7 +222,7 @@ implements 
 
 cacheConfig
-private finalCacheConfig cacheConfig
+private finalCacheConfig cacheConfig
 
 
 
@@ -231,7 +231,7 @@ implements 
 
 conf
-private finalorg.apache.hadoop.conf.Configuration conf
+private finalorg.apache.hadoop.conf.Configuration conf
 
 
 
@@ -240,7 +240,7 @@ implements 
 
 bcn
-private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String bcn
+private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String bcn
 
 
 
@@ -249,7 +249,7 @@ implements 
 
 bcv
-private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String bcv
+private finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String bcv
 
 
 
@@ -266,7 +266,7 @@ implements 
 
 BlockCacheViewTmplImpl
-publicBlockCacheViewTmplImpl(org.jamon.TemplateManagerp_templateManager,
+publicBlockCacheViewTmplImpl(org.jamon.TemplateManagerp_templateManager,
   BlockCacheViewTmpl.ImplDatap_implData)
 
 
@@ -284,7 +284,7 @@ implements 
 
 __jamon_setOptionalArguments
-protected staticBlockCacheViewTmpl.ImplData__jamon_setOptionalArguments(BlockCacheViewTmpl.ImplDatap_implData)
+protected staticBlockCacheViewTmpl.ImplData__jamon_setOptionalArguments(BlockCacheViewTmpl.ImplDatap_implData)
 
 
 
@@ -293,7 +293,7 @@ implements 
 
 renderNoFlush
-publicvoidrenderNoFlush(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true;
 title="class or interface in java.io">WriterjamonWriter)
+publicvoidrenderNoFlush(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true;
 title="class or interface in java.io">WriterjamonWriter)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Specified by:
@@ -308,7 +308,7 @@ implements 
 
 __jamon_innerUnit__bc_by_file
-privatevoid__jamon_innerUnit__bc_by_file(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true;
 title="class or interface in java.io">WriterjamonWriter,
+privatevoid__jamon_innerUnit__bc_by_file(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true;
 title="class or interface in java.io">WriterjamonWriter,
  BlockCacheUtil.CachedBlocksByFilecbsbf)
 throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Throws:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
 
b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
index 6f37c59..1058a36 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
@@ -297,58 +297,58 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 privateboolean m_bcv__IsNotDefault
 
 
-
+
 
 
 
 
-m_bcn
-privatehttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_bcn
+m_format
+privatehttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_format
 
 
-
+
 
 
 
 
-m_bcn__IsNotDefault
-privateboolean m_bcn__IsNotDefault
+m_format__IsNotDefault
+privateboolean m_format__IsNotDefault
 
 
-
+
 
 
 
 
-m_format
-privatehttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_format
+m_filter
+privatehttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or 

[42/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
index 1669d7f..bb96d93 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
@@ -103,7 +103,7 @@
 
 
 
-public static class HFile.FileInfo
+public static class HFile.FileInfo
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true;
 title="class or interface in 
java.util">SortedMapbyte[],byte[]
 Metadata for this file. Conjured by the writer. Read in by 
the reader.
@@ -355,7 +355,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 RESERVED_PREFIX
-static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String RESERVED_PREFIX
+static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String RESERVED_PREFIX
 See Also:Constant
 Field Values
 
 
@@ -365,7 +365,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 RESERVED_PREFIX_BYTES
-static finalbyte[] RESERVED_PREFIX_BYTES
+static finalbyte[] RESERVED_PREFIX_BYTES
 
 
 
@@ -374,7 +374,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 LASTKEY
-static finalbyte[] LASTKEY
+static finalbyte[] LASTKEY
 
 
 
@@ -383,7 +383,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 AVG_KEY_LEN
-static finalbyte[] AVG_KEY_LEN
+static finalbyte[] AVG_KEY_LEN
 
 
 
@@ -392,7 +392,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 AVG_VALUE_LEN
-static finalbyte[] AVG_VALUE_LEN
+static finalbyte[] AVG_VALUE_LEN
 
 
 
@@ -401,7 +401,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 CREATE_TIME_TS
-static finalbyte[] CREATE_TIME_TS
+static finalbyte[] CREATE_TIME_TS
 
 
 
@@ -410,7 +410,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 COMPARATOR
-static finalbyte[] COMPARATOR
+static finalbyte[] COMPARATOR
 
 
 
@@ -419,7 +419,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 TAGS_COMPRESSED
-static finalbyte[] TAGS_COMPRESSED
+static finalbyte[] TAGS_COMPRESSED
 
 
 
@@ -428,7 +428,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 MAX_TAGS_LEN
-public static finalbyte[] MAX_TAGS_LEN
+public static finalbyte[] MAX_TAGS_LEN
 
 
 
@@ -437,7 +437,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 map
-private finalhttp://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true;
 title="class or interface in java.util">SortedMapbyte[],byte[] map
+private finalhttp://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true;
 title="class or interface in java.util">SortedMapbyte[],byte[] map
 
 
 
@@ -454,7 +454,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 HFile.FileInfo
-publicHFile.FileInfo()
+publicHFile.FileInfo()
 
 
 
@@ -471,7 +471,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 append
-publicHFile.FileInfoappend(byte[]k,
+publicHFile.FileInfoappend(byte[]k,
 byte[]v,
 booleancheckPrefix)
   throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -490,7 +490,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 clear
-publicvoidclear()
+publicvoidclear()
 
 Specified by:
 http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#clear()"
 title="class or interface in java.util">clearin 
interfacehttp://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in 
java.util">Mapbyte[],byte[]
@@ -503,7 +503,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 
 
 comparator
-publichttp://docs.oracle.com/javase/7/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">Comparator? super 
byte[]comparator()
+publichttp://docs.oracle.com/javase/7/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">Comparator? super 
byte[]comparator()
 
 Specified by:
 http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true#comparator()"
 title="class or interface in 

[02/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.html
--
diff --git 
a/devapidocs/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.html 
b/devapidocs/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.html
index f56dac4..cc2e9bb 100644
--- a/devapidocs/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.html
+++ b/devapidocs/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.html
@@ -36,7 +36,7 @@
 
 
 Prev 
Class
-Next 
Class
+Next 
Class
 
 
 Frames
@@ -217,78 +217,52 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 
-MutableHistogram
-getHistogram(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringhistoName)
-
-
-org.apache.hadoop.metrics2.lib.MutableCounterLong
-getLongCounter(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringcounterName,
-longpotentialStartingValue)
+MutableFastCounter
+getCounter(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringcounterName,
+longpotentialStartingValue)
 Get a MetricMutableCounterLong from the storage.
 
 
-
+
 org.apache.hadoop.metrics2.lib.MutableGaugeLong
-getLongGauge(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgaugeName,
-longpotentialStartingValue)
+getGauge(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgaugeName,
+longpotentialStartingValue)
 Get a MetricMutableGaugeLong from the storage.
 
 
-
-MetricMutableQuantiles
-getQuantile(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringhistoName)
-
 
+MutableHistogram
+getHistogram(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringhistoName)
+
+
 org.apache.hadoop.metrics2.MetricsTag
 getTag(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Get a tag by name
 
 
-
+
 org.apache.hadoop.metrics2.MetricsInfo
 info()
 
-
+
 (package private) http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in 
java.util">Collectionorg.apache.hadoop.metrics2.lib.MutableMetric
 metrics()
 
-
-org.apache.hadoop.metrics2.lib.MutableCounterInt
-newCounter(org.apache.hadoop.metrics2.MetricsInfoinfo,
-intiVal)
-Create a mutable integer counter
-
-
 
-org.apache.hadoop.metrics2.lib.MutableCounterLong
+MutableFastCounter
 newCounter(org.apache.hadoop.metrics2.MetricsInfoinfo,
 longiVal)
 Create a mutable long integer counter
 
 
 
-org.apache.hadoop.metrics2.lib.MutableCounterInt
-newCounter(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringdesc,
-intiVal)
-Create a mutable integer counter
-
-
-
-org.apache.hadoop.metrics2.lib.MutableCounterLong
+MutableFastCounter
 newCounter(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringdesc,
 longiVal)
 Create a mutable long integer counter
 
 
-
-org.apache.hadoop.metrics2.lib.MutableGaugeInt
-newGauge(org.apache.hadoop.metrics2.MetricsInfoinfo,
-intiVal)
-Create a mutable integer gauge
-
-
 
 org.apache.hadoop.metrics2.lib.MutableGaugeLong
 newGauge(org.apache.hadoop.metrics2.MetricsInfoinfo,
@@ -297,14 +271,6 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 
-org.apache.hadoop.metrics2.lib.MutableGaugeInt
-newGauge(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringdesc,
-intiVal)
-Create a mutable integer gauge
-
-
-
 org.apache.hadoop.metrics2.lib.MutableGaugeLong
 newGauge(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
   

[11/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
 
b/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
new file mode 100644
index 000..9ff0616
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
@@ -0,0 +1,310 @@
+FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose (Apache HBase 
2.0.0-SNAPSHOT API)
+org.apache.hadoop.hbase.util
+Class FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose
+
+
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose
+
+
+
+
+
+
+
+All Implemented Interfaces:
+CancelableProgressable
+
+
+Enclosing class:
+FanOutOneBlockAsyncDFSOutputHelper
+
+
+
+static final class FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose
+extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+implements CancelableProgressable
+
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private org.apache.hadoop.hdfs.DFSClient
+client
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose(org.apache.hadoop.hdfs.DFSClientclient)
+
+
+
+
+
+
+
+
+
+Method Summary
+
+Methods
+
+Modifier and Type
+Method and Description
+
+
+boolean
+progress()
+Report progress.
+
+
+
+
+
+
+
+Methods inherited from class java.lang.Object: clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Field Detail
+
+
+
+
+
+client
+private finalorg.apache.hadoop.hdfs.DFSClient client
+
+
+
+
+
+
+
+
+
+Constructor Detail
+
+
+
+
+
+FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose
+publicFanOutOneBlockAsyncDFSOutputHelper.CancelOnClose(org.apache.hadoop.hdfs.DFSClientclient)
+
+
+
+
+
+
+
+
+
+Method Detail
+
+
+
+
+
+progress
+publicbooleanprogress()
+Description copied from interface:CancelableProgressable
+Report progress.  Returns true if operations should 
continue, false if the
+ operation should be canceled and rolled back.
+
+Specified by:
+progressin
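CancelOnClose implements CancelableProgressable, whose progress() contract is spelled out above: return true if the long-running operation should continue, false if it should be canceled and rolled back (CancelOnClose itself decides based on the wrapped DFSClient, per its single field). Below is a minimal illustrative implementation of the same contract, assuming the interface lives in org.apache.hadoop.hbase.util as the surrounding classes do; the deadline policy is invented for the example.

import org.apache.hadoop.hbase.util.CancelableProgressable;

/** Illustrative CancelableProgressable: keep going until a deadline passes. */
public class DeadlineProgressable implements CancelableProgressable {
  private final long deadlineMillis;

  public DeadlineProgressable(long timeoutMillis) {
    this.deadlineMillis = System.currentTimeMillis() + timeoutMillis;
  }

  @Override
  public boolean progress() {
    // true  -> the operation should continue
    // false -> the operation should be canceled and rolled back
    return System.currentTimeMillis() < deadlineMillis;
  }
}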
 

[31/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.SyncMetrics.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.SyncMetrics.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.SyncMetrics.html
new file mode 100644
index 000..a009ca4
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.SyncMetrics.html
@@ -0,0 +1,400 @@
+WALProcedureStore.SyncMetrics (Apache HBase 2.0.0-SNAPSHOT API)
+org.apache.hadoop.hbase.procedure2.store.wal
+Class 
WALProcedureStore.SyncMetrics
+
+
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore.SyncMetrics
+
+
+
+
+
+
+
+Enclosing class:
+WALProcedureStore
+
+
+
+public static class WALProcedureStore.SyncMetrics
+extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private int
+syncedEntries
+
+
+private float
+syncedPerSec
+
+
+private long
+syncWaitMs
+
+
+private long
+timestamp
+
+
+private long
+totalSyncedBytes
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+WALProcedureStore.SyncMetrics()
+
+
+
+
+
+
+
+
+
+Method Summary
+
+Methods
+
+Modifier and Type
+Method and Description
+
+
+long
+getSyncedEntries()
+
+
+float
+getSyncedPerSec()
+
+
+long
+getSyncWaitMs()
+
+
+long
+getTimestamp()
+
+
+long
+getTotalSyncedBytes()
+
+
+
+
+
+
+Methods inherited from class java.lang.Object: clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Field Detail
+
+
+
+
+
+timestamp
+privatelong timestamp
+
+
+
+
+
+
+
+syncWaitMs
+privatelong syncWaitMs
+
+
+
+
+
+
+
+totalSyncedBytes
+privatelong totalSyncedBytes
+
+
+
+
+
+
+
+syncedEntries
+privateint syncedEntries
+
+
+
+
+
+
+
+syncedPerSec
+privatefloat syncedPerSec
+
+
+
+
+
+
+
+
+
+Constructor Detail
+
+
+
+
+
+WALProcedureStore.SyncMetrics
+publicWALProcedureStore.SyncMetrics()
+
+
+
+
+
+
+
+
+
+Method Detail
+
+
+
+
+
+getTimestamp
+publiclonggetTimestamp()
+
+
+
+
+
+
+
+getSyncWaitMs
+publiclonggetSyncWaitMs()
+
+
+
+
+
+
+
+getTotalSyncedBytes
+publiclonggetTotalSyncedBytes()
+
+
+
+
+
+
+
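SyncMetrics is a plain snapshot bean: timestamp, syncWaitMs, totalSyncedBytes, syncedEntries and syncedPerSec, each with a getter. Elsewhere in this commit WALProcedureStore.getSyncMetrics() returns an ArrayList of these snapshots (backed by the new CircularFifoBuffer field) and HMaster gains getWalProcedureStore(), so a hedged sketch for dumping the recent sync samples could look like this; it assumes a live HMaster reference and omits error handling.

import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;

public class SyncMetricsDumpSketch {
  /** Print the recent WAL-sync samples kept by the procedure store. */
  static void dumpSyncMetrics(HMaster master) {
    WALProcedureStore store = master.getWalProcedureStore();
    for (WALProcedureStore.SyncMetrics m : store.getSyncMetrics()) {
      System.out.println(m.getTimestamp() + ": waited " + m.getSyncWaitMs() + " ms, "
          + m.getSyncedEntries() + " entries, " + m.getTotalSyncedBytes() + " bytes, "
          + m.getSyncedPerSec() + " synced/sec");
    }
  }
}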

[41/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html
index 14c186d..80e7169 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html
@@ -231,11 +231,11 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 
-(package private) static http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicLong
+(package private) static Counter
 checksumFailures
 
 
-static http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicLong
+static Counter
 dataBlockReadCnt
 
 
@@ -579,7 +579,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 checksumFailures
-static finalhttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicLong checksumFailures
+static finalCounter checksumFailures
 
 
 
@@ -588,7 +588,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 dataBlockReadCnt
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicLong dataBlockReadCnt
+public static finalCounter dataBlockReadCnt
 
 
 
@@ -597,7 +597,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 FORMAT_VERSION_KEY
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String FORMAT_VERSION_KEY
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String FORMAT_VERSION_KEY
 The configuration key for HFile version to use for new 
files
 See Also:Constant
 Field Values
 
@@ -644,7 +644,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 getFormatVersion
-public staticintgetFormatVersion(org.apache.hadoop.conf.Configurationconf)
+public staticintgetFormatVersion(org.apache.hadoop.conf.Configurationconf)
 
 
 
@@ -653,7 +653,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 getWriterFactoryNoCache
-public static finalHFile.WriterFactorygetWriterFactoryNoCache(org.apache.hadoop.conf.Configurationconf)
+public static finalHFile.WriterFactorygetWriterFactoryNoCache(org.apache.hadoop.conf.Configurationconf)
 Returns the factory to be used to create HFile 
writers.
  Disables block cache access for all writers created through the
  returned factory.
@@ -665,7 +665,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 getWriterFactory
-public static finalHFile.WriterFactorygetWriterFactory(org.apache.hadoop.conf.Configurationconf,
+public static finalHFile.WriterFactorygetWriterFactory(org.apache.hadoop.conf.Configurationconf,
CacheConfigcacheConf)
 Returns the factory to be used to create HFile 
writers
 
@@ -676,7 +676,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 pickReaderVersion
-private staticHFile.ReaderpickReaderVersion(org.apache.hadoop.fs.Pathpath,
+private staticHFile.ReaderpickReaderVersion(org.apache.hadoop.fs.Pathpath,
  FSDataInputStreamWrapperfsdis,
  longsize,
  CacheConfigcacheConf,
@@ -697,7 +697,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 createReader
-public staticHFile.ReadercreateReader(org.apache.hadoop.fs.FileSystemfs,
+public staticHFile.ReadercreateReader(org.apache.hadoop.fs.FileSystemfs,
 org.apache.hadoop.fs.Pathpath,
 FSDataInputStreamWrapperfsdis,
 longsize,
@@ -716,7 +716,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 createReader
-public staticHFile.ReadercreateReader(org.apache.hadoop.fs.FileSystemfs,
+public staticHFile.ReadercreateReader(org.apache.hadoop.fs.FileSystemfs,
 org.apache.hadoop.fs.Pathpath,
 CacheConfigcacheConf,
 org.apache.hadoop.conf.Configurationconf)
@@ -733,7 +733,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 createReaderFromStream
-staticHFile.ReadercreateReaderFromStream(org.apache.hadoop.fs.Pathpath,
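Besides switching checksumFailures and dataBlockReadCnt from AtomicLong to the HBase Counter type, this hunk re-anchors the documented static entry points getFormatVersion(Configuration) and getWriterFactoryNoCache(Configuration). A small, hedged sketch that exercises only that documented surface, assuming the standard HBaseConfiguration entry point:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.HFile;

public class HFileFormatVersionSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // FORMAT_VERSION_KEY is documented above as the config key for the HFile
    // version used for new files; getFormatVersion resolves the effective value.
    System.out.println("Effective HFile format version: " + HFile.getFormatVersion(conf));

    // Per its javadoc, this factory creates writers with block-cache access disabled.
    HFile.WriterFactory factory = HFile.getWriterFactoryNoCache(conf);
    System.out.println("Writer factory: " + factory);
  }
}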

[28/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
index 985efa6..4968bc1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
@@ -99,7 +99,7 @@
 
 
 
-static class HRegion.PrepareFlushResult
+static class HRegion.PrepareFlushResult
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 A result object from prepare flush cache stage
 
@@ -244,7 +244,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 result
-finalRegion.FlushResult result
+finalRegion.FlushResult result
 
 
 
@@ -253,7 +253,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 storeFlushCtxs
-finalhttp://docs.oracle.com/javase/7/docs/api/java/util/TreeMap.html?is-external=true;
 title="class or interface in java.util">TreeMapbyte[],StoreFlushContext storeFlushCtxs
+finalhttp://docs.oracle.com/javase/7/docs/api/java/util/TreeMap.html?is-external=true;
 title="class or interface in java.util">TreeMapbyte[],StoreFlushContext storeFlushCtxs
 
 
 
@@ -262,7 +262,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 committedFiles
-finalhttp://docs.oracle.com/javase/7/docs/api/java/util/TreeMap.html?is-external=true;
 title="class or interface in java.util">TreeMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.Path committedFiles
+finalhttp://docs.oracle.com/javase/7/docs/api/java/util/TreeMap.html?is-external=true;
 title="class or interface in java.util">TreeMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.Path committedFiles
 
 
 
@@ -271,7 +271,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 storeFlushableSize
-finalhttp://docs.oracle.com/javase/7/docs/api/java/util/TreeMap.html?is-external=true;
 title="class or interface in java.util">TreeMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long storeFlushableSize
+finalhttp://docs.oracle.com/javase/7/docs/api/java/util/TreeMap.html?is-external=true;
 title="class or interface in java.util">TreeMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long storeFlushableSize
 
 
 
@@ -280,7 +280,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 startTime
-finallong startTime
+finallong startTime
 
 
 
@@ -289,7 +289,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 flushOpSeqId
-finallong flushOpSeqId
+finallong flushOpSeqId
 
 
 
@@ -298,7 +298,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 flushedSeqId
-finallong flushedSeqId
+finallong flushedSeqId
 
 
 
@@ -307,7 +307,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 totalFlushableSize
-finallong totalFlushableSize
+finallong totalFlushableSize
 
 
 
@@ -324,7 +324,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 HRegion.PrepareFlushResult
-HRegion.PrepareFlushResult(Region.FlushResultresult,
+HRegion.PrepareFlushResult(Region.FlushResultresult,
   longflushSeqId)
 Constructs an early exit case
 
@@ -335,7 +335,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 HRegion.PrepareFlushResult
-HRegion.PrepareFlushResult(http://docs.oracle.com/javase/7/docs/api/java/util/TreeMap.html?is-external=true;
 title="class or interface in java.util">TreeMapbyte[],StoreFlushContextstoreFlushCtxs,
+HRegion.PrepareFlushResult(http://docs.oracle.com/javase/7/docs/api/java/util/TreeMap.html?is-external=true;
 title="class or interface in java.util">TreeMapbyte[],StoreFlushContextstoreFlushCtxs,
   http://docs.oracle.com/javase/7/docs/api/java/util/TreeMap.html?is-external=true;
 title="class or interface in java.util">TreeMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.PathcommittedFiles,
   http://docs.oracle.com/javase/7/docs/api/java/util/TreeMap.html?is-external=true;
 title="class or interface in 

[29/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.SyncMetrics.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.SyncMetrics.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.SyncMetrics.html
new file mode 100644
index 000..cb2e88f
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.SyncMetrics.html
@@ -0,0 +1,155 @@
+Uses of Class 
org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore.SyncMetrics 
(Apache HBase 2.0.0-SNAPSHOT API)
+Uses of 
Classorg.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore.SyncMetrics
+
+
+
+
+
+Packages that use WALProcedureStore.SyncMetrics
+
+Package
+Description
+
+
+
+org.apache.hadoop.hbase.procedure2.store.wal
+
+
+
+
+
+
+
+
+
+
+Uses of WALProcedureStore.SyncMetrics 
in org.apache.hadoop.hbase.procedure2.store.wal
+
+Methods in org.apache.hadoop.hbase.procedure2.store.wal
 that return types with arguments of type WALProcedureStore.SyncMetrics
+
+Modifier and Type
+Method and Description
+
+
+
+http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in java.util">ArrayListWALProcedureStore.SyncMetrics
+WALProcedureStore.getSyncMetrics()
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.html
index 93eaca1..dae27c4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.html
@@ -100,6 +100,19 @@
 
 
 
+
+Methods in org.apache.hadoop.hbase.master
 that return WALProcedureStore
+
+Modifier and Type
+Method and Description
+
+
+
+WALProcedureStore
+HMaster.getWalProcedureStore()
+
+
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-frame.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-frame.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-frame.html
index 2565285..d51d1d6 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-frame.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-frame.html
@@ -23,6 +23,7 @@
 ProcedureWALFormatReader.EntryIterator
 ProcedureWALFormatReader.WalProcedureMap
 WALProcedureStore
+WALProcedureStore.SyncMetrics
 
 Enums
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-summary.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-summary.html b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-summary.html
index edc9ac0..8e64c42 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-summary.html
@@ -129,6 +129,10 @@
 WAL implementation of the ProcedureStore.
 
 
+
+WALProcedureStore.SyncMetrics
+
+
 
 
 


[44/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
index 3b89a33..55608d3 100644
--- a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
+++ b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
@@ -3149,23 +3149,29 @@ service.
 
 
 class
+AdaptiveLifoCoDelCallQueue
+Adaptive LIFO blocking queue utilizing CoDel algorithm to prevent queue overloading (a conceptual sketch of this queueing policy appears after this diff section).
+
+
+
+class
 AsyncCall
 Represents an Async Hbase call and its response.
 
 
-
+
 class
 AsyncRpcChannel
 Netty RPC channel
 
 
-
+
 class
 AsyncServerResponseHandler
 Handles Hbase responses
 
 
-
+
 class
 BlockingRpcCallbackR
 Simple RpcCallback 
implementation providing a
@@ -3173,144 +3179,144 @@ service.
  will block util the instance's BlockingRpcCallback.run(Object)
 method has been called.
 
 
-
+
 (package private) class
 BufferChain
 Chain of ByteBuffers.
 
 
-
+
 class
 Call
 A call waiting for a value.
 
 
-
+
 class
 CallRunner
 The request processing logic, which is usually executed in 
thread pools provided by an
  RpcScheduler.
 
 
-
+
 class
 ConnectionId
 This class holds the address and the user ticket, etc.
 
 
-
+
 class
 DelegatingPayloadCarryingRpcController
 Simple delegating controller for use with the RpcControllerFactory to help 
override
  standard behavior of a PayloadCarryingRpcController.
 
 
-
+
 class
 FailedServers
 A class to manage a list of servers that failed 
recently.
 
 
-
+
 interface
 HBaseRPCErrorHandler
 An interface for calling out of RPC for error 
conditions.
 
 
-
+
 class
 IPCUtil
 Utility to help ipc'ing.
 
 
-
+
 class
 MasterCoprocessorRpcChannel
 Provides clients with an RPC connection to call coprocessor 
endpoint Services
  against the active master.
 
 
-
+
 class
 MetricsHBaseServer
 
-
+
 class
 MetricsHBaseServerSourceFactoryImpl
 
-
+
 class
 MetricsHBaseServerSourceImpl
 
-
+
 class
 PayloadCarryingRpcController
 Optionally carries Cells across the proxy/service interface 
down into ipc.
 
 
-
+
 class
 RegionCoprocessorRpcChannel
 Provides clients with an RPC connection to call coprocessor 
endpoint Services
  against a given table region.
 
 
-
+
 class
 RegionServerCoprocessorRpcChannel
 Provides clients with an RPC connection to call coprocessor 
endpoint
  Services against a given region server.
 
 
-
+
 interface
 RpcCallback
 Denotes a callback action that has to be executed at the 
end of an Rpc Call.
 
 
-
+
 interface
 RpcCallContext
 
-
+
 interface
 RpcClient
 Interface for RpcClient implementations so 
ConnectionManager can handle it.
 
 
-
+
 class
 RpcClientFactory
 Factory to create a RpcClient
 
 
-
+
 class
 RpcClientImpl
 Does RPC against a cluster.
 
 
-
+
 class
 RpcControllerFactory
 Factory to create a PayloadCarryingRpcController
 
 
-
+
 class
 RpcExecutor
 
-
+
 (package private) class
 RpcSchedulerContext
 
-
+
 class
 ServerRpcController
 Used for server-side protobuf RPC service invocations.
 
 
-
+
 class
 TimeLimitedRpcController
 
@@ -5516,235 +5522,241 @@ service.
 
 
 
+class
+RegionServicesForStores
+Services a Store needs from a Region.
+
+
+
 interface
 ReplicationService
 Gateway to Cluster Replication.
 
 
-
+
 interface
 ReplicationSinkService
 A sink for a replication stream has to expose this 
service.
 
 
-
+
 interface
 ReplicationSourceService
 A source for a replication stream has to expose this 
service.
 
 
-
+
 class
 ReversedKeyValueHeap
 ReversedKeyValueHeap is used for supporting reversed 
scanning.
 
 
-
+
 class
 ReversedMobStoreScanner
 ReversedMobStoreScanner extends from ReversedStoreScanner, 
and is used to support
  reversed scanning in both the memstore and the MOB store.
 
 
-
+
 (package private) class
 ReversedRegionScannerImpl
 ReversibleRegionScannerImpl extends from RegionScannerImpl, 
and is used to
  support reversed scanning.
 
 
-
+
 (package private) class
 ReversedStoreScanner
 ReversedStoreScanner extends from StoreScanner, and is used 
to support
  reversed scanning.
 
 
-
+
 class
 RSDumpServlet
 
-
+
 class
 RSRpcServices
 Implements the regionserver RPC services.
 
 
-
+
 class
 RSStatusServlet
 
-
+
 class
 ScanDeleteTracker
 This class is responsible for the tracking and enforcement 
of Deletes
  during the course of a Scan operation.
 
 
-
+
 class
 ScanInfo
 Immutable information for scans over a store.
 
 
-
+
 class
 ScanQueryMatcher
 A query matcher that is specifically designed for the scan 
case.
 
 
-
+
 class
 ScanWildcardColumnTracker
 Keeps track of the columns for a scan if they are not 
explicitly specified
 
 
-
+
 class
 Segment
 This is an abstraction of 
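
The new class highlighted in this page, org.apache.hadoop.hbase.ipc.AdaptiveLifoCoDelCallQueue, is described as an adaptive LIFO blocking queue that uses the CoDel algorithm to prevent queue overloading. The sketch below is only a conceptual illustration of that queueing policy, not the HBase implementation: the class, fields, and parameters are invented for the example, and the real queue operates on CallRunner tasks with a different constructor.

import java.util.ArrayDeque;
import java.util.Deque;

public class AdaptiveLifoCoDelSketch<T> {
  // One queued task plus the time it was enqueued, so its sojourn time can be measured.
  private static final class Entry<T> {
    final T task;
    final long enqueueNanos;
    Entry(T task, long enqueueNanos) { this.task = task; this.enqueueNanos = enqueueNanos; }
  }

  private final Deque<Entry<T>> deque = new ArrayDeque<>();
  private final long targetDelayNanos; // CoDel target sojourn time
  private final long intervalNanos;    // CoDel measurement interval
  private long minDelaySeen = Long.MAX_VALUE;
  private long intervalStart = System.nanoTime();
  private boolean overloaded = false;

  public AdaptiveLifoCoDelSketch(long targetDelayNanos, long intervalNanos) {
    this.targetDelayNanos = targetDelayNanos;
    this.intervalNanos = intervalNanos;
  }

  public synchronized void add(T task) {
    deque.addLast(new Entry<>(task, System.nanoTime()));
  }

  /** Returns the next task to run, or null if the queue is empty or the polled task was shed. */
  public synchronized T poll() {
    // Serve newest-first (LIFO) while overloaded, oldest-first (FIFO) otherwise.
    Entry<T> e = overloaded ? deque.pollLast() : deque.pollFirst();
    if (e == null) {
      return null;
    }
    long now = System.nanoTime();
    long delay = now - e.enqueueNanos;
    minDelaySeen = Math.min(minDelaySeen, delay);
    if (now - intervalStart > intervalNanos) {
      // End of a CoDel interval: overloaded if even the minimum observed delay stayed above the target.
      overloaded = minDelaySeen > targetDelayNanos;
      minDelaySeen = Long.MAX_VALUE;
      intervalStart = now;
    }
    // Under overload, shed tasks that have already waited far past the target delay.
    if (overloaded && delay > targetDelayNanos) {
      return null;
    }
    return e.task;
  }
}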

[47/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index d51af73..8bd28bc 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2016 The Apache Software Foundation
 
-  File: 1688,
- Errors: 12743,
+  File: 1692,
+ Errors: 12712,
  Warnings: 0,
  Infos: 0
   
@@ -1147,7 +1147,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -1427,7 +1427,21 @@ under the License.
   0
 
 
-  210
+  209
+
+  
+  
+
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.metrics2.lib.MutableFastCounter.java;>org/apache/hadoop/metrics2/lib/MutableFastCounter.java
+
+
+  0
+
+
+  0
+
+
+  0
 
   
   
@@ -2309,7 +2323,7 @@ under the License.
   0
 
 
-  4
+  3
 
   
   
@@ -2477,7 +2491,7 @@ under the License.
   0
 
 
-  60
+  59
 
   
   
@@ -4862,6 +4876,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutput.java;>org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.java
+
+
+  0
+
+
+  0
+
+
+  6
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.zookeeper.ZKClusterId.java;>org/apache/hadoop/hbase/zookeeper/ZKClusterId.java
 
 
@@ -4946,6 +4974,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.ipc.AdaptiveLifoCoDelCallQueue.java;>org/apache/hadoop/hbase/ipc/AdaptiveLifoCoDelCallQueue.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.io.hfile.LruCachedBlock.java;>org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java
 
 
@@ -5963,7 +6005,7 @@ under the License.
   0
 
 
-  10
+  9
 
   
   
@@ -6243,7 +6285,7 @@ under the License.
   0
 
 
-  12
+  0
 
   
   
@@ -6453,7 +6495,7 @@ under the License.
   0
 
 
-  2
+  0
 
   
   
@@ -6486,6 +6528,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.java;>org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.java
+
+
+  0
+
+
+  0
+
+
+  3
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.metrics2.lib.MutableSizeHistogram.java;>org/apache/hadoop/metrics2/lib/MutableSizeHistogram.java
 
 
@@ -8777,7 +8833,7 @@ under the License.
   0
 
 
-  3
+  1
 
   
   
@@ -10989,7 +11045,7 @@ under the License.
   0
 
 
-  7
+  6
 
   
   

[46/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 0dfdd5b..b212e75 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -465,6 +465,8 @@
 
 Set ack/noAck mode.
 
+ackedLength
 - Variable in class org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutput.Callback
+
 ACL_GLOBAL_NAME
 - Static variable in class org.apache.hadoop.hbase.security.access.AccessControlLists
 
 ACL_KEY_DELIMITER
 - Static variable in class org.apache.hadoop.hbase.security.access.AccessControlLists
@@ -664,6 +666,12 @@
 
 adaptee
 - Variable in class org.apache.hadoop.hbase.regionserver.ImmutableSegmentAdapter
 
+AdaptiveLifoCoDelCallQueue - Class in org.apache.hadoop.hbase.ipc
+
+Adaptive LIFO blocking queue utilizing CoDel algorithm to 
prevent queue overloading.
+
+AdaptiveLifoCoDelCallQueue(int,
 int, int, double, AtomicLong, AtomicLong) - Constructor for class 
org.apache.hadoop.hbase.ipc.AdaptiveLifoCoDelCallQueue
+
 add(byte[],
 byte[], byte[]) - Method in class org.apache.hadoop.hbase.client.Append
 
 Add the specified column and value to this Append 
operation.
@@ -775,6 +783,8 @@
 
 Attempt to add the specified cached block to this 
queue.
 
+add(CallRunner)
 - Method in class org.apache.hadoop.hbase.ipc.AdaptiveLifoCoDelCallQueue
+
 add(ServerName)
 - Method in class org.apache.hadoop.hbase.master.DeadServer
 
 Adds the server to the dead server list if it's not there 
already.
@@ -938,12 +948,8 @@
 
 Add sample to a stat metric by name.
 
-add(long)
 - Method in class org.apache.hadoop.metrics2.lib.MetricMutableQuantiles
-
 add(long)
 - Method in class org.apache.hadoop.metrics2.lib.MutableHistogram
 
-add(long)
 - Method in class org.apache.hadoop.metrics2.lib.MutableRangeHistogram
-
 add(long)
 - Method in interface org.apache.hadoop.metrics2.MetricHistogram
 
 Add a single value to a histogram's stream of values.
@@ -960,6 +966,8 @@
 
 building
 
+addAll(Collection?
 extends CallRunner) - Method in class 
org.apache.hadoop.hbase.ipc.AdaptiveLifoCoDelCallQueue
+
 addAll(Collection?
 extends Cell) - Method in class 
org.apache.hadoop.hbase.regionserver.CellSet
 
 addAll(Collection?
 extends Runnable) - Method in class 
org.apache.hadoop.hbase.thrift.CallQueue
@@ -989,6 +997,8 @@
 
 addAndGetGlobalMemstoreSize(long)
 - Method in class org.apache.hadoop.hbase.regionserver.RegionServerAccounting
 
+addAndGetGlobalMemstoreSize(long)
 - Method in class org.apache.hadoop.hbase.regionserver.RegionServicesForStores
+
 addAndGetRegionReplayEditsSize(byte[],
 long) - Method in class org.apache.hadoop.hbase.regionserver.RegionServerAccounting
 
 Add memStoreSize to replayEditsPerRegion.
@@ -2008,6 +2018,10 @@
 
 addToServerHoldings(ServerName,
 HRegionInfo) - Method in class org.apache.hadoop.hbase.master.RegionStates
 
+addToSize(long)
 - Method in class org.apache.hadoop.hbase.procedure2.store.wal.ProcedureWALFile
+
+Used to update in-progress log sizes.
+
 addToSortedRanges()
 - Method in class org.apache.hadoop.hbase.util.byterange.ByteRangeSet
 
 abstract
@@ -2146,14 +2160,14 @@
 
 afterLast
 - Variable in class org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArrayScanner
 
-age
 - Variable in class org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.CachedBlocksByFile
-
 ageAtEviction
 - Variable in class org.apache.hadoop.hbase.io.hfile.CacheStats
 
 Keep running age at eviction time
 
 ageGauge
 - Variable in class org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSinkSourceImpl
 
+ageHistogram
 - Variable in class org.apache.hadoop.hbase.io.hfile.AgeSnapshot
+
 ageOfLastAppliedOp
 - Variable in class org.apache.hadoop.hbase.replication.ReplicationLoadSink
 
 ageOfLastShippedOp
 - Variable in class org.apache.hadoop.hbase.replication.ReplicationLoadSource
@@ -2168,7 +2182,7 @@
 
 Snapshot of block cache age in cache.
 
-AgeSnapshot(Histogram)
 - Constructor for class org.apache.hadoop.hbase.io.hfile.AgeSnapshot
+AgeSnapshot(FastLongHistogram)
 - Constructor for class org.apache.hadoop.hbase.io.hfile.AgeSnapshot
 
 agg
 - Variable in class org.apache.hadoop.hbase.regionserver.MetricsRegionSourceImpl
 
@@ -2229,6 +2243,10 @@
 
 Try to allocate size bytes from the 
chunk.
 
+alloc
 - Variable in class org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutput
+
+ALLOC
 - Static variable in class org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper
+
 allocate(int,
 boolean) - Static method in class org.apache.hadoop.hbase.io.ByteBufferOutputStream
 
 allocate()
 - Method in class org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator.Bucket
@@ -2417,7 +2435,7 @@
 
 append(Cell)
 - Method in class org.apache.hadoop.hbase.regionserver.StripeMultiFileWriter.SizeMultiWriter
 
-append(HTableDescriptor,
 HRegionInfo, WALKey, WALEdit, boolean) - Method in class 

[32/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.Entry.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.Entry.html b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.Entry.html
index d52d840..d523d46 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.Entry.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.Entry.html
@@ -99,7 +99,7 @@
 
 
 
-private static class ProcedureWALFormatReader.Entry
+private static class ProcedureWALFormatReader.Entry
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -247,7 +247,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 hashNext
-protectedProcedureWALFormatReader.Entry
 hashNext
+protectedProcedureWALFormatReader.Entry
 hashNext
 
 
 
@@ -256,7 +256,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 childHead
-protectedProcedureWALFormatReader.Entry
 childHead
+protectedProcedureWALFormatReader.Entry
 childHead
 
 
 
@@ -265,7 +265,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 linkNext
-protectedProcedureWALFormatReader.Entry
 linkNext
+protectedProcedureWALFormatReader.Entry
 linkNext
 
 
 
@@ -274,7 +274,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 linkPrev
-protectedProcedureWALFormatReader.Entry
 linkPrev
+protectedProcedureWALFormatReader.Entry
 linkPrev
 
 
 
@@ -283,7 +283,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 replayNext
-protectedProcedureWALFormatReader.Entry
 replayNext
+protectedProcedureWALFormatReader.Entry
 replayNext
 
 
 
@@ -292,7 +292,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 replayPrev
-protectedProcedureWALFormatReader.Entry
 replayPrev
+protectedProcedureWALFormatReader.Entry
 replayPrev
 
 
 
@@ -301,7 +301,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 procedure
-protectedProcedure procedure
+protectedProcedure procedure
 
 
 
@@ -310,7 +310,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 proto
-protectedorg.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure
 proto
+protectedorg.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure
 proto
 
 
 
@@ -319,7 +319,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 ready
-protectedboolean ready
+protectedboolean ready
 
 
 
@@ -336,7 +336,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 ProcedureWALFormatReader.Entry
-publicProcedureWALFormatReader.Entry(ProcedureWALFormatReader.EntryhashNext)
+publicProcedureWALFormatReader.Entry(ProcedureWALFormatReader.EntryhashNext)
 
 
 
@@ -353,7 +353,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 getProcId
-publiclonggetProcId()
+publiclonggetProcId()
 
 
 
@@ -362,7 +362,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 getParentId
-publiclonggetParentId()
+publiclonggetParentId()
 
 
 
@@ -371,7 +371,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 hasParent
-publicbooleanhasParent()
+publicbooleanhasParent()
 
 
 
@@ -380,7 +380,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 isReady
-publicbooleanisReady()
+publicbooleanisReady()
 
 
 
@@ -389,7 +389,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 isCompleted
-publicbooleanisCompleted()
+publicbooleanisCompleted()
 
 
 
@@ -398,7 +398,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 convert
-publicProcedureconvert()
+publicProcedureconvert()
   throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Throws:
 http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -410,7 +410,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 convertToInfo
-publicProcedureInfoconvertToInfo()
+publicProcedureInfoconvertToInfo()
 
 
 
@@ -419,7 +419,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 toString
-publichttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
+publichttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
 

[34/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/master/MetricsMasterSource.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MetricsMasterSource.html b/devapidocs/org/apache/hadoop/hbase/master/MetricsMasterSource.html
index 15dc026..ca6a949 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MetricsMasterSource.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MetricsMasterSource.html
@@ -282,7 +282,7 @@ extends 
 
 Methods inherited from interfaceorg.apache.hadoop.hbase.metrics.BaseSource
-decGauge,
 getMetricsContext,
 getMetricsDescription,
 getMetricsJmxContext,
 getMetricsName,
 incCounters,
 incGauge,
 init,
 removeMetric, setGauge,
 updateHistogram,
 updateQuantile
+decGauge,
 getMetricsContext,
 getMetricsDescription,
 getMetricsJmxContext,
 getMetricsName,
 incCounters,
 incGauge,
 init,
 removeMetric, setGauge,
 updateHistogram
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/master/MetricsMasterSourceImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MetricsMasterSourceImpl.html b/devapidocs/org/apache/hadoop/hbase/master/MetricsMasterSourceImpl.html
index f8ae4f5..6f2978f 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MetricsMasterSourceImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MetricsMasterSourceImpl.html
@@ -130,7 +130,7 @@ implements Field and Description
 
 
-private 
org.apache.hadoop.metrics2.lib.MutableCounterLong
+private MutableFastCounter
 clusterRequestsCounter
 
 
@@ -220,7 +220,7 @@ implements BaseSourceImpl
-decGauge,
 getMetricsContext,
 getMetricsDescription,
 getMetricsJmxContext,
 getMetricsName,
 getMetricsRegistry,
 incCounters,
 incGauge, href="../../../../../org/apache/hadoop/hbase/metrics/BaseSourceImpl.html#removeMetric(java.lang.String)">removeMetric,
 > href="../../../../../org/apache/hadoop/hbase/metrics/BaseSourceImpl.html#setGauge(java.lang.String,%20long)">setGauge,
 > href="../../../../../org/apache/hadoop/hbase/metrics/BaseSourceImpl.html#updateHistogram(java.lang.String,%20long)">updateHistogram,
 > href="../../../../../org/apache/hadoop/hbase/metrics/BaseSourceImpl.html#updateQuantile(java.lang.String,%20long)">updateQuantile
+decGauge,
 getMetricsContext,
 getMetricsDescription,
 getMetricsJmxContext,
 getMetricsName,
 getMetricsRegistry,
 incCounters,
 incGauge, href="../../../../../org/apache/hadoop/hbase/metrics/BaseSourceImpl.html#removeMetric(java.lang.String)">removeMetric,
 > href="../../../../../org/apache/hadoop/hbase/metrics/BaseSourceImpl.html#setGauge(java.lang.String,%20long)">setGauge,
 > href="../../../../../org/apache/hadoop/hbase/metrics/BaseSourceImpl.html#updateHistogram(java.lang.String,%20long)">updateHistogram
 
 
 
@@ -234,7 +234,7 @@ implements BaseSource
-decGauge,
 getMetricsContext,
 getMetricsDescription,
 getMetricsJmxContext,
 getMetricsName,
 incCounters,
 incGauge,
 removeMetric,
 setGauge,
 updateHistogram,
 updateQuantile
+decGauge,
 getMetricsContext,
 getMetricsDescription,
 getMetricsJmxContext,
 getMetricsName,
 incCounters,
 incGauge,
 removeMetric,
 setGauge,
 updateHistogram
 
 
 
@@ -265,7 +265,7 @@ implements 
 
 clusterRequestsCounter
-privateorg.apache.hadoop.metrics2.lib.MutableCounterLong clusterRequestsCounter
+privateMutableFastCounter clusterRequestsCounter
 
 
 

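The substantive change in this page is that the master's clusterRequestsCounter field is now a MutableFastCounter instead of a MutableCounterLong, and updateQuantile has disappeared from the inherited BaseSource methods. For orientation only, here is a hedged example of bumping a named counter through the BaseSource interface listed above; the RequestCounting class and the metric name are made up, and the real master source updates its request counter internally.

import org.apache.hadoop.hbase.metrics.BaseSource;

public class RequestCounting {
  /** Adds count to a named counter via incCounters, one of the BaseSource methods listed above. */
  static void recordRequests(BaseSource source, long count) {
    source.incCounters("exampleRequests", count);
  }
}
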
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/master/MetricsSnapshotSource.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MetricsSnapshotSource.html b/devapidocs/org/apache/hadoop/hbase/master/MetricsSnapshotSource.html
index 468d7dd..c044c24 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MetricsSnapshotSource.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MetricsSnapshotSource.html
@@ -203,7 +203,7 @@ extends 
 
 Methods inherited from interfaceorg.apache.hadoop.hbase.metrics.BaseSource
-decGauge,
 getMetricsContext,
 getMetricsDescription,
 getMetricsJmxContext,
 getMetricsName,
 incCounters,
 incGauge,
 init,
 removeMetric, setGauge,
 updateHistogram,
 updateQuantile
+decGauge,
 getMetricsContext,
 getMetricsDescription,
 getMetricsJmxContext,
 getMetricsName,
 incCounters,
 incGauge,
 init,
 removeMetric, setGauge,
 updateHistogram
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/master/MetricsSnapshotSourceImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MetricsSnapshotSourceImpl.html b/devapidocs/org/apache/hadoop/hbase/master/MetricsSnapshotSourceImpl.html
index 699433f..991d456 100644
--- 

[08/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/util/class-use/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/class-use/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html b/devapidocs/org/apache/hadoop/hbase/util/class-use/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html
new file mode 100644
index 000..cdca8bd
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html
@@ -0,0 +1,168 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+Uses of Interface org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter (Apache HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Uses of Interface org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter
+
+
+
+
+
+Packages that use FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter
+
+Package
+Description
+
+
+
+org.apache.hadoop.hbase.util
+
+
+
+
+
+
+
+
+
+
+Uses of FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter in org.apache.hadoop.hbase.util
+
+Fields in org.apache.hadoop.hbase.util declared as FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter
+
+Modifier and Type
+Field and Description
+
+
+
+private static FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter
+FanOutOneBlockAsyncDFSOutputHelper.PIPELINE_ACK_STATUS_GETTER
+
+
+
+
+Methods in org.apache.hadoop.hbase.util that return FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter
+
+Modifier and Type
+Method and Description
+
+
+
+private static FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter
+FanOutOneBlockAsyncDFSOutputHelper.createPipelineAckStatusGetter()
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Copyright © 2007-2016 The Apache Software Foundation (http://www.apache.org/). All rights reserved.
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/util/class-use/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/class-use/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html b/devapidocs/org/apache/hadoop/hbase/util/class-use/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html
new file mode 100644
index 000..2338681
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html
@@ -0,0 +1,168 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+Uses of Interface org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter (Apache HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Uses of Interface org.apache.hadoop.hbase.util.FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter
+
+
+
+
+
+Packages that use FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter
+
+Package
+Description
+
+
+
+org.apache.hadoop.hbase.util
+
+
+
+
+
+
+
+
+
+
+Uses of FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter in org.apache.hadoop.hbase.util
+
+Fields in org.apache.hadoop.hbase.util
 declared as 

[03/51] [partial] hbase-site git commit: Published site at c5288947ddc4abae2f4036544a775ff81538df2f.

2016-02-26 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/55dfd6fe/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALKey.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALKey.html b/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALKey.html
index a4aab37..6c9b885 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALKey.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALKey.html
@@ -328,8 +328,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static WALKey
-WALUtil.doFullAppendTransaction(WALwal,
-  HTableDescriptorhtd,
+WALUtil.doFullAppendTransaction(WALwal,
+  http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">IntegerreplicationScope,
   HRegionInfohri,
   WALEditedit,
   MultiVersionConcurrencyControlmvcc,
@@ -340,8 +340,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static WALKey
-WALUtil.writeBulkLoadMarkerAndSync(WALwal,
-HTableDescriptorhtd,
+WALUtil.writeBulkLoadMarkerAndSync(WALwal,
+http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">IntegerreplicationScope,
 HRegionInfohri,
 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptordesc,
 MultiVersionConcurrencyControlmvcc)
@@ -350,8 +350,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static WALKey
-WALUtil.writeCompactionMarker(WALwal,
-  HTableDescriptorhtd,
+WALUtil.writeCompactionMarker(WALwal,
+  http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">IntegerreplicationScope,
   HRegionInfohri,
   
org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptorc,
   MultiVersionConcurrencyControlmvcc)
@@ -360,8 +360,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static WALKey
-WALUtil.writeFlushMarker(WALwal,
-HTableDescriptorhtd,
+WALUtil.writeFlushMarker(WALwal,
+http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">IntegerreplicationScope,
 HRegionInfohri,
 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptorf,
 booleansync,
@@ -371,16 +371,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private static WALKey
-WALUtil.writeMarker(WALwal,
-  HTableDescriptorhtd,
+WALUtil.writeMarker(WALwal,
+  http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">IntegerreplicationScope,
   HRegionInfohri,
   WALEditedit,
   MultiVersionConcurrencyControlmvcc)
 
 
 static WALKey
-WALUtil.writeRegionEventMarker(WALwal,
-HTableDescriptorhtd,
+WALUtil.writeRegionEventMarker(WALwal,
+http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in 

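The WALUtil signature changes above replace the HTableDescriptor parameter of the marker-writing methods (doFullAppendTransaction, writeBulkLoadMarkerAndSync, writeCompactionMarker, writeFlushMarker, writeMarker, writeRegionEventMarker) with a NavigableMap of byte[] column family names to Integer replication scopes. A hedged sketch of building such a map from a table descriptor follows; the ReplicationScopes helper and its method name are invented for the example.

import java.util.NavigableMap;
import java.util.TreeMap;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

public class ReplicationScopes {
  /** Maps each column family name to its replication scope, keyed with the byte[] comparator. */
  static NavigableMap<byte[], Integer> fromDescriptor(HTableDescriptor htd) {
    NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    for (HColumnDescriptor family : htd.getColumnFamilies()) {
      scopes.put(family.getName(), family.getScope());
    }
    return scopes;
  }
}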