[1/6] hbase git commit: HBASE-20513 Collect and emit ScanMetrics in PerformanceEvaluation

2018-05-04 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 fd24083e2 -> 23b905408
  refs/heads/branch-1.3 b6bb52110 -> 7a4a7d2e4
  refs/heads/branch-1.4 489120afb -> f46a4ca69
  refs/heads/branch-2 7df8e5e4a -> 991d78ca2
  refs/heads/branch-2.0 3c4fadae4 -> 81f69e585
  refs/heads/master 78ffd7ace -> 291dedbf8


HBASE-20513 Collect and emit ScanMetrics in PerformanceEvaluation


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/291dedbf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/291dedbf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/291dedbf

Branch: refs/heads/master
Commit: 291dedbf8185ede93808746e4233c2507d86f5cb
Parents: 78ffd7a
Author: Andrew Purtell 
Authored: Tue May 1 10:58:09 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 17:59:12 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 119 ---
 1 file changed, 100 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/291dedbf/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 25a1d3c..7fc064f 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -69,6 +69,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterAllFilter;
@@ -1046,6 +1047,12 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 private String testName;
 private Histogram latencyHistogram;
 private Histogram valueSizeHistogram;
+private Histogram rpcCallsHistogram;
+private Histogram remoteRpcCallsHistogram;
+private Histogram millisBetweenNextHistogram;
+private Histogram regionsScannedHistogram;
+private Histogram bytesInResultsHistogram;
+private Histogram bytesInRemoteResultsHistogram;
 private RandomDistribution.Zipf zipf;
 
 /**
@@ -1102,6 +1109,34 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   this.valueSizeHistogram.update(valueSize);
 }
 
+void updateScanMetrics(final ScanMetrics metrics) {
+  Map metricsMap = metrics.getMetricsMap();
+  Long rpcCalls = metricsMap.get(ScanMetrics.RPC_CALLS_METRIC_NAME);
+  if (rpcCalls != null) {
+this.rpcCallsHistogram.update(rpcCalls.longValue());
+  }
+  Long remoteRpcCalls = 
metricsMap.get(ScanMetrics.REMOTE_RPC_CALLS_METRIC_NAME);
+  if (remoteRpcCalls != null) {
+this.remoteRpcCallsHistogram.update(remoteRpcCalls.longValue());
+  }
+  Long millisBetweenNext = 
metricsMap.get(ScanMetrics.MILLIS_BETWEEN_NEXTS_METRIC_NAME);
+  if (millisBetweenNext != null) {
+this.millisBetweenNextHistogram.update(millisBetweenNext.longValue());
+  }
+  Long regionsScanned = 
metricsMap.get(ScanMetrics.REGIONS_SCANNED_METRIC_NAME);
+  if (regionsScanned != null) {
+this.regionsScannedHistogram.update(regionsScanned.longValue());
+  }
+  Long bytesInResults = 
metricsMap.get(ScanMetrics.BYTES_IN_RESULTS_METRIC_NAME);
+  if (bytesInResults != null && bytesInResults.longValue() > 0) {
+this.bytesInResultsHistogram.update(bytesInResults.longValue());
+  }
+  Long bytesInRemoteResults = 
metricsMap.get(ScanMetrics.BYTES_IN_REMOTE_RESULTS_METRIC_NAME);
+  if (bytesInRemoteResults != null && bytesInRemoteResults.longValue() > 0) {
+this.bytesInRemoteResultsHistogram.update(bytesInRemoteResults.longValue());
+  }
+}
+
 String generateStatus(final int sr, final int i, final int lr) {
   return sr + "/" + i + "/" + lr + ", latency " + getShortLatencyReport() +
 (!isRandomValueSize()? "": ", value size " + 
getShortValueSizeReport());
@@ -1123,10 +1158,19 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 }
 
 void testSetup() throws IOException {
-  createConnection();
-  onStartup();
+  // test metrics
   latencyHistogram = YammerHistogramUtils.newHistogram(new 
UniformReservoir(1024 * 500));
   valueSizeHistogram = 

[4/6] hbase git commit: HBASE-20513 Collect and emit ScanMetrics in PerformanceEvaluation

2018-05-04 Thread apurtell
HBASE-20513 Collect and emit ScanMetrics in PerformanceEvaluation


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/23b90540
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/23b90540
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/23b90540

Branch: refs/heads/branch-1
Commit: 23b9054089b2e529a45db63ee100e4748ce10f2f
Parents: fd24083
Author: Andrew Purtell 
Authored: Tue May 1 10:58:09 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 17:59:28 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 127 ---
 1 file changed, 108 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/23b90540/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 60d6cb6..d31e39f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -70,6 +70,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
@@ -973,6 +974,12 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 private String testName;
 private Histogram latencyHistogram;
 private Histogram valueSizeHistogram;
+private Histogram rpcCallsHistogram;
+private Histogram remoteRpcCallsHistogram;
+private Histogram millisBetweenNextHistogram;
+private Histogram regionsScannedHistogram;
+private Histogram bytesInResultsHistogram;
+private Histogram bytesInRemoteResultsHistogram;
 private RandomDistribution.Zipf zipf;
 
 /**
@@ -1030,6 +1037,34 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   this.valueSizeHistogram.update(valueSize);
 }
 
+void updateScanMetrics(final ScanMetrics metrics) {
+  Map metricsMap = metrics.getMetricsMap();
+  Long rpcCalls = metricsMap.get(ScanMetrics.RPC_CALLS_METRIC_NAME);
+  if (rpcCalls != null) {
+this.rpcCallsHistogram.update(rpcCalls.longValue());
+  }
+  Long remoteRpcCalls = 
metricsMap.get(ScanMetrics.REMOTE_RPC_CALLS_METRIC_NAME);
+  if (remoteRpcCalls != null) {
+this.remoteRpcCallsHistogram.update(remoteRpcCalls.longValue());
+  }
+  Long millisBetweenNext = 
metricsMap.get(ScanMetrics.MILLIS_BETWEEN_NEXTS_METRIC_NAME);
+  if (millisBetweenNext != null) {
+this.millisBetweenNextHistogram.update(millisBetweenNext.longValue());
+  }
+  Long regionsScanned = 
metricsMap.get(ScanMetrics.REGIONS_SCANNED_METRIC_NAME);
+  if (regionsScanned != null) {
+this.regionsScannedHistogram.update(regionsScanned.longValue());
+  }
+  Long bytesInResults = 
metricsMap.get(ScanMetrics.BYTES_IN_RESULTS_METRIC_NAME);
+  if (bytesInResults != null && bytesInResults.longValue() > 0) {
+this.bytesInResultsHistogram.update(bytesInResults.longValue());
+  }
+  Long bytesInRemoteResults = 
metricsMap.get(ScanMetrics.BYTES_IN_REMOTE_RESULTS_METRIC_NAME);
+  if (bytesInRemoteResults != null && bytesInRemoteResults.longValue() > 0) {
+this.bytesInRemoteResultsHistogram.update(bytesInRemoteResults.longValue());
+  }
+}
+
 String generateStatus(final int sr, final int i, final int lr) {
   return sr + "/" + i + "/" + lr + ", latency " + getShortLatencyReport() +
 (!isRandomValueSize()? "": ", value size " + 
getShortValueSizeReport());
@@ -1051,12 +1086,22 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 }
 
 void testSetup() throws IOException {
+  // test metrics
+  latencyHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  valueSizeHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  // scan metrics
+  rpcCallsHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  remoteRpcCallsHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  millisBetweenNextHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  

[5/6] hbase git commit: HBASE-20513 Collect and emit ScanMetrics in PerformanceEvaluation

2018-05-04 Thread apurtell
HBASE-20513 Collect and emit ScanMetrics in PerformanceEvaluation


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f46a4ca6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f46a4ca6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f46a4ca6

Branch: refs/heads/branch-1.4
Commit: f46a4ca69a44b1f9f017132d7b4a9c39d615cfc9
Parents: 489120a
Author: Andrew Purtell 
Authored: Tue May 1 10:58:09 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 17:59:57 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 127 ---
 1 file changed, 108 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f46a4ca6/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 60d6cb6..d31e39f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -70,6 +70,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
@@ -973,6 +974,12 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 private String testName;
 private Histogram latencyHistogram;
 private Histogram valueSizeHistogram;
+private Histogram rpcCallsHistogram;
+private Histogram remoteRpcCallsHistogram;
+private Histogram millisBetweenNextHistogram;
+private Histogram regionsScannedHistogram;
+private Histogram bytesInResultsHistogram;
+private Histogram bytesInRemoteResultsHistogram;
 private RandomDistribution.Zipf zipf;
 
 /**
@@ -1030,6 +1037,34 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   this.valueSizeHistogram.update(valueSize);
 }
 
+void updateScanMetrics(final ScanMetrics metrics) {
+  Map metricsMap = metrics.getMetricsMap();
+  Long rpcCalls = metricsMap.get(ScanMetrics.RPC_CALLS_METRIC_NAME);
+  if (rpcCalls != null) {
+this.rpcCallsHistogram.update(rpcCalls.longValue());
+  }
+  Long remoteRpcCalls = 
metricsMap.get(ScanMetrics.REMOTE_RPC_CALLS_METRIC_NAME);
+  if (remoteRpcCalls != null) {
+this.remoteRpcCallsHistogram.update(remoteRpcCalls.longValue());
+  }
+  Long millisBetweenNext = 
metricsMap.get(ScanMetrics.MILLIS_BETWEEN_NEXTS_METRIC_NAME);
+  if (millisBetweenNext != null) {
+this.millisBetweenNextHistogram.update(millisBetweenNext.longValue());
+  }
+  Long regionsScanned = 
metricsMap.get(ScanMetrics.REGIONS_SCANNED_METRIC_NAME);
+  if (regionsScanned != null) {
+this.regionsScannedHistogram.update(regionsScanned.longValue());
+  }
+  Long bytesInResults = 
metricsMap.get(ScanMetrics.BYTES_IN_RESULTS_METRIC_NAME);
+  if (bytesInResults != null && bytesInResults.longValue() > 0) {
+this.bytesInResultsHistogram.update(bytesInResults.longValue());
+  }
+  Long bytesInRemoteResults = 
metricsMap.get(ScanMetrics.BYTES_IN_REMOTE_RESULTS_METRIC_NAME);
+  if (bytesInRemoteResults != null && bytesInRemoteResults.longValue() > 0) {
+this.bytesInRemoteResultsHistogram.update(bytesInRemoteResults.longValue());
+  }
+}
+
 String generateStatus(final int sr, final int i, final int lr) {
   return sr + "/" + i + "/" + lr + ", latency " + getShortLatencyReport() +
 (!isRandomValueSize()? "": ", value size " + 
getShortValueSizeReport());
@@ -1051,12 +1086,22 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 }
 
 void testSetup() throws IOException {
+  // test metrics
+  latencyHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  valueSizeHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  // scan metrics
+  rpcCallsHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  remoteRpcCallsHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  millisBetweenNextHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  

[6/6] hbase git commit: HBASE-20513 Collect and emit ScanMetrics in PerformanceEvaluation

2018-05-04 Thread apurtell
HBASE-20513 Collect and emit ScanMetrics in PerformanceEvaluation


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7a4a7d2e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7a4a7d2e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7a4a7d2e

Branch: refs/heads/branch-1.3
Commit: 7a4a7d2e4ab581083229e62228eea3b5916b32df
Parents: b6bb521
Author: Andrew Purtell 
Authored: Tue May 1 10:58:09 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 18:00:01 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 127 ---
 1 file changed, 108 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7a4a7d2e/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index faf52d0..f2097d4 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -68,6 +68,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
@@ -959,6 +960,12 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 private String testName;
 private Histogram latencyHistogram;
 private Histogram valueSizeHistogram;
+private Histogram rpcCallsHistogram;
+private Histogram remoteRpcCallsHistogram;
+private Histogram millisBetweenNextHistogram;
+private Histogram regionsScannedHistogram;
+private Histogram bytesInResultsHistogram;
+private Histogram bytesInRemoteResultsHistogram;
 private RandomDistribution.Zipf zipf;
 
 /**
@@ -1012,6 +1019,34 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   this.valueSizeHistogram.update(valueSize);
 }
 
+void updateScanMetrics(final ScanMetrics metrics) {
+  Map metricsMap = metrics.getMetricsMap();
+  Long rpcCalls = metricsMap.get(ScanMetrics.RPC_CALLS_METRIC_NAME);
+  if (rpcCalls != null) {
+this.rpcCallsHistogram.update(rpcCalls.longValue());
+  }
+  Long remoteRpcCalls = 
metricsMap.get(ScanMetrics.REMOTE_RPC_CALLS_METRIC_NAME);
+  if (remoteRpcCalls != null) {
+this.remoteRpcCallsHistogram.update(remoteRpcCalls.longValue());
+  }
+  Long millisBetweenNext = 
metricsMap.get(ScanMetrics.MILLIS_BETWEEN_NEXTS_METRIC_NAME);
+  if (millisBetweenNext != null) {
+this.millisBetweenNextHistogram.update(millisBetweenNext.longValue());
+  }
+  Long regionsScanned = 
metricsMap.get(ScanMetrics.REGIONS_SCANNED_METRIC_NAME);
+  if (regionsScanned != null) {
+this.regionsScannedHistogram.update(regionsScanned.longValue());
+  }
+  Long bytesInResults = 
metricsMap.get(ScanMetrics.BYTES_IN_RESULTS_METRIC_NAME);
+  if (bytesInResults != null && bytesInResults.longValue() > 0) {
+this.bytesInResultsHistogram.update(bytesInResults.longValue());
+  }
+  Long bytesInRemoteResults = 
metricsMap.get(ScanMetrics.BYTES_IN_REMOTE_RESULTS_METRIC_NAME);
+  if (bytesInRemoteResults != null && bytesInRemoteResults.longValue() > 0) {
+this.bytesInRemoteResultsHistogram.update(bytesInRemoteResults.longValue());
+  }
+}
+
 String generateStatus(final int sr, final int i, final int lr) {
   return sr + "/" + i + "/" + lr + ", latency " + getShortLatencyReport() +
 (!isRandomValueSize()? "": ", value size " + 
getShortValueSizeReport());
@@ -1033,12 +1068,22 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 }
 
 void testSetup() throws IOException {
+  // test metrics
+  latencyHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  valueSizeHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  // scan metrics
+  rpcCallsHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  remoteRpcCallsHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  millisBetweenNextHistogram = YammerHistogramUtils.newHistogram(new 
UniformSample(1024 * 500));
+  

[2/6] hbase git commit: HBASE-20513 Collect and emit ScanMetrics in PerformanceEvaluation

2018-05-04 Thread apurtell
HBASE-20513 Collect and emit ScanMetrics in PerformanceEvaluation


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/81f69e58
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/81f69e58
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/81f69e58

Branch: refs/heads/branch-2.0
Commit: 81f69e585159840b622b9030e4eb6ebf35a7e6ae
Parents: 3c4fada
Author: Andrew Purtell 
Authored: Tue May 1 10:58:09 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 17:59:16 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 119 ---
 1 file changed, 100 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/81f69e58/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index a0d5572..11905aa 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -69,6 +69,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterAllFilter;
@@ -1046,6 +1047,12 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 private String testName;
 private Histogram latencyHistogram;
 private Histogram valueSizeHistogram;
+private Histogram rpcCallsHistogram;
+private Histogram remoteRpcCallsHistogram;
+private Histogram millisBetweenNextHistogram;
+private Histogram regionsScannedHistogram;
+private Histogram bytesInResultsHistogram;
+private Histogram bytesInRemoteResultsHistogram;
 private RandomDistribution.Zipf zipf;
 
 /**
@@ -1102,6 +1109,34 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   this.valueSizeHistogram.update(valueSize);
 }
 
+void updateScanMetrics(final ScanMetrics metrics) {
+  Map metricsMap = metrics.getMetricsMap();
+  Long rpcCalls = metricsMap.get(ScanMetrics.RPC_CALLS_METRIC_NAME);
+  if (rpcCalls != null) {
+this.rpcCallsHistogram.update(rpcCalls.longValue());
+  }
+  Long remoteRpcCalls = 
metricsMap.get(ScanMetrics.REMOTE_RPC_CALLS_METRIC_NAME);
+  if (remoteRpcCalls != null) {
+this.remoteRpcCallsHistogram.update(remoteRpcCalls.longValue());
+  }
+  Long millisBetweenNext = 
metricsMap.get(ScanMetrics.MILLIS_BETWEEN_NEXTS_METRIC_NAME);
+  if (millisBetweenNext != null) {
+this.millisBetweenNextHistogram.update(millisBetweenNext.longValue());
+  }
+  Long regionsScanned = 
metricsMap.get(ScanMetrics.REGIONS_SCANNED_METRIC_NAME);
+  if (regionsScanned != null) {
+this.regionsScannedHistogram.update(regionsScanned.longValue());
+  }
+  Long bytesInResults = 
metricsMap.get(ScanMetrics.BYTES_IN_RESULTS_METRIC_NAME);
+  if (bytesInResults != null && bytesInResults.longValue() > 0) {
+this.bytesInResultsHistogram.update(bytesInResults.longValue());
+  }
+  Long bytesInRemoteResults = 
metricsMap.get(ScanMetrics.BYTES_IN_REMOTE_RESULTS_METRIC_NAME);
+  if (bytesInRemoteResults != null && bytesInRemoteResults.longValue() > 0) {
+this.bytesInRemoteResultsHistogram.update(bytesInRemoteResults.longValue());
+  }
+}
+
 String generateStatus(final int sr, final int i, final int lr) {
   return sr + "/" + i + "/" + lr + ", latency " + getShortLatencyReport() +
 (!isRandomValueSize()? "": ", value size " + 
getShortValueSizeReport());
@@ -1123,10 +1158,19 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 }
 
 void testSetup() throws IOException {
-  createConnection();
-  onStartup();
+  // test metrics
   latencyHistogram = YammerHistogramUtils.newHistogram(new 
UniformReservoir(1024 * 500));
   valueSizeHistogram = YammerHistogramUtils.newHistogram(new 
UniformReservoir(1024 * 500));
+  // scan metrics
+  rpcCallsHistogram = YammerHistogramUtils.newHistogram(new 
UniformReservoir(1024 * 500));
+  remoteRpcCallsHistogram = YammerHistogramUtils.newHistogram(new 
UniformReservoir(1024 * 500));
+  millisBetweenNextHistogram = 

[3/6] hbase git commit: HBASE-20513 Collect and emit ScanMetrics in PerformanceEvaluation

2018-05-04 Thread apurtell
HBASE-20513 Collect and emit ScanMetrics in PerformanceEvaluation


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/991d78ca
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/991d78ca
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/991d78ca

Branch: refs/heads/branch-2
Commit: 991d78ca2bf838a24e504590b1124367d255ec03
Parents: 7df8e5e
Author: Andrew Purtell 
Authored: Tue May 1 10:58:09 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 17:59:20 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 119 ---
 1 file changed, 100 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/991d78ca/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index a0d5572..11905aa 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -69,6 +69,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterAllFilter;
@@ -1046,6 +1047,12 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 private String testName;
 private Histogram latencyHistogram;
 private Histogram valueSizeHistogram;
+private Histogram rpcCallsHistogram;
+private Histogram remoteRpcCallsHistogram;
+private Histogram millisBetweenNextHistogram;
+private Histogram regionsScannedHistogram;
+private Histogram bytesInResultsHistogram;
+private Histogram bytesInRemoteResultsHistogram;
 private RandomDistribution.Zipf zipf;
 
 /**
@@ -1102,6 +1109,34 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   this.valueSizeHistogram.update(valueSize);
 }
 
+void updateScanMetrics(final ScanMetrics metrics) {
+  Map metricsMap = metrics.getMetricsMap();
+  Long rpcCalls = metricsMap.get(ScanMetrics.RPC_CALLS_METRIC_NAME);
+  if (rpcCalls != null) {
+this.rpcCallsHistogram.update(rpcCalls.longValue());
+  }
+  Long remoteRpcCalls = 
metricsMap.get(ScanMetrics.REMOTE_RPC_CALLS_METRIC_NAME);
+  if (remoteRpcCalls != null) {
+this.remoteRpcCallsHistogram.update(remoteRpcCalls.longValue());
+  }
+  Long millisBetweenNext = 
metricsMap.get(ScanMetrics.MILLIS_BETWEEN_NEXTS_METRIC_NAME);
+  if (millisBetweenNext != null) {
+this.millisBetweenNextHistogram.update(millisBetweenNext.longValue());
+  }
+  Long regionsScanned = 
metricsMap.get(ScanMetrics.REGIONS_SCANNED_METRIC_NAME);
+  if (regionsScanned != null) {
+this.regionsScannedHistogram.update(regionsScanned.longValue());
+  }
+  Long bytesInResults = 
metricsMap.get(ScanMetrics.BYTES_IN_RESULTS_METRIC_NAME);
+  if (bytesInResults != null && bytesInResults.longValue() > 0) {
+this.bytesInResultsHistogram.update(bytesInResults.longValue());
+  }
+  Long bytesInRemoteResults = 
metricsMap.get(ScanMetrics.BYTES_IN_REMOTE_RESULTS_METRIC_NAME);
+  if (bytesInRemoteResults != null && bytesInRemoteResults.longValue() > 0) {
+this.bytesInRemoteResultsHistogram.update(bytesInRemoteResults.longValue());
+  }
+}
+
 String generateStatus(final int sr, final int i, final int lr) {
   return sr + "/" + i + "/" + lr + ", latency " + getShortLatencyReport() +
 (!isRandomValueSize()? "": ", value size " + 
getShortValueSizeReport());
@@ -1123,10 +1158,19 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 }
 
 void testSetup() throws IOException {
-  createConnection();
-  onStartup();
+  // test metrics
   latencyHistogram = YammerHistogramUtils.newHistogram(new 
UniformReservoir(1024 * 500));
   valueSizeHistogram = YammerHistogramUtils.newHistogram(new 
UniformReservoir(1024 * 500));
+  // scan metrics
+  rpcCallsHistogram = YammerHistogramUtils.newHistogram(new 
UniformReservoir(1024 * 500));
+  remoteRpcCallsHistogram = YammerHistogramUtils.newHistogram(new 
UniformReservoir(1024 * 500));
+  millisBetweenNextHistogram = 

[7/7] hbase git commit: HBASE-20517 Fix PerformanceEvaluation 'column' parameter

2018-05-04 Thread apurtell
HBASE-20517 Fix PerformanceEvaluation 'column' parameter


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a275e863
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a275e863
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a275e863

Branch: refs/heads/branch-1.2
Commit: a275e863124e979745bea493c578c1f9deb400be
Parents: 1519dc0
Author: Andrew Purtell 
Authored: Thu May 3 16:27:23 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 17:24:49 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 31 +++-
 1 file changed, 24 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a275e863/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 0f12cc7..eea5257 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -133,7 +133,6 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   public static final String TABLE_NAME = "TestTable";
   public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
   public static final byte [] COLUMN_ZERO = Bytes.toBytes("" + 0);
-  public static final byte [] QUALIFIER_NAME = COLUMN_ZERO;
   public static final int DEFAULT_VALUE_LENGTH = 1000;
   public static final int ROW_LENGTH = 26;
 
@@ -1203,7 +1202,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   scan.setCaching(opts.caching);
   FilterList list = new FilterList();
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1241,7 +1243,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 scan.setFilter(new FilterAllFilter());
   }
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1341,7 +1346,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   }
   Get get = new Get(getRandomRow(this.rand, opts.totalRows));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 get.addFamily(FAMILY_NAME);
   }
@@ -1436,7 +1444,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 Scan scan = new Scan(format(opts.startRow));
 scan.setCaching(opts.caching);
 if (opts.addColumns) {
-  scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+  for (int column = 0; column < opts.columns; column++) {
+byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+scan.addColumn(FAMILY_NAME, qualifier);
+  }
 } else {
   scan.addFamily(FAMILY_NAME);
 }
@@ -1569,7 +1580,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 void testRow(final int i) throws IOException {
   Get get = new Get(format(i));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   }
   if (opts.filterAll) {
 get.setFilter(new FilterAllFilter());
@@ -1649,7 +1663,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   Scan scan = new Scan();
   scan.setCaching(opts.caching);
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);

[2/7] hbase git commit: HBASE-20517 Fix PerformanceEvaluation 'column' parameter

2018-05-04 Thread apurtell
HBASE-20517 Fix PerformanceEvaluation 'column' parameter


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7df8e5e4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7df8e5e4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7df8e5e4

Branch: refs/heads/branch-2
Commit: 7df8e5e4a77426fa5ff9064e3a16a15b6b7fe0c0
Parents: de71cb5
Author: Andrew Purtell 
Authored: Thu May 3 16:25:17 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 17:24:21 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 46 +++-
 1 file changed, 36 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7df8e5e4/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 95d363f..a0d5572 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -139,7 +139,6 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   public static final String TABLE_NAME = "TestTable";
   public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
   public static final byte [] COLUMN_ZERO = Bytes.toBytes("" + 0);
-  public static final byte [] QUALIFIER_NAME = COLUMN_ZERO;
   public static final int DEFAULT_VALUE_LENGTH = 1000;
   public static final int ROW_LENGTH = 26;
 
@@ -1341,7 +1340,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   }
   Get get = new Get(getRandomRow(this.rand, opts.totalRows));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 get.addFamily(FAMILY_NAME);
   }
@@ -1466,7 +1468,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 
.setCacheBlocks(opts.cacheBlocks).setAsyncPrefetch(opts.asyncPrefetch)
 .setReadType(opts.scanReadType);
 if (opts.addColumns) {
-  scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+  for (int column = 0; column < opts.columns; column++) {
+byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+scan.addColumn(FAMILY_NAME, qualifier);
+  }
 } else {
   scan.addFamily(FAMILY_NAME);
 }
@@ -1489,7 +1494,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 void testRow(final int i) throws IOException, InterruptedException {
   Get get = new Get(format(i));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   }
   if (opts.filterAll) {
 get.setFilter(new FilterAllFilter());
@@ -1572,7 +1580,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   .setAsyncPrefetch(opts.asyncPrefetch).setReadType(opts.scanReadType);
   FilterList list = new FilterList();
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1612,7 +1623,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 scan.setFilter(new FilterAllFilter());
   }
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1712,7 +1726,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   }
   Get get = new Get(getRandomRow(this.rand, opts.totalRows));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int 

[6/7] hbase git commit: HBASE-20517 Fix PerformanceEvaluation 'column' parameter

2018-05-04 Thread apurtell
HBASE-20517 Fix PerformanceEvaluation 'column' parameter


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b6bb5211
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b6bb5211
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b6bb5211

Branch: refs/heads/branch-1.3
Commit: b6bb5211026d3419e6ee24da2ce44d9c92287f84
Parents: 62a3434
Author: Andrew Purtell 
Authored: Thu May 3 16:27:23 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 17:24:45 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 31 +++-
 1 file changed, 24 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b6bb5211/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index b68b61a..faf52d0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -133,7 +133,6 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   public static final String TABLE_NAME = "TestTable";
   public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
   public static final byte [] COLUMN_ZERO = Bytes.toBytes("" + 0);
-  public static final byte [] QUALIFIER_NAME = COLUMN_ZERO;
   public static final int DEFAULT_VALUE_LENGTH = 1000;
   public static final int ROW_LENGTH = 26;
 
@@ -1189,7 +1188,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   scan.setCaching(opts.caching);
   FilterList list = new FilterList();
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1227,7 +1229,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 scan.setFilter(new FilterAllFilter());
   }
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1327,7 +1332,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   }
   Get get = new Get(getRandomRow(this.rand, opts.totalRows));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 get.addFamily(FAMILY_NAME);
   }
@@ -1422,7 +1430,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 Scan scan = new Scan(format(opts.startRow));
 scan.setCaching(opts.caching);
 if (opts.addColumns) {
-  scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+  for (int column = 0; column < opts.columns; column++) {
+byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+scan.addColumn(FAMILY_NAME, qualifier);
+  }
 } else {
   scan.addFamily(FAMILY_NAME);
 }
@@ -1555,7 +1566,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 void testRow(final int i) throws IOException {
   Get get = new Get(format(i));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   }
   if (opts.filterAll) {
 get.setFilter(new FilterAllFilter());
@@ -1635,7 +1649,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   Scan scan = new Scan();
   scan.setCaching(opts.caching);
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);

[4/7] hbase git commit: HBASE-20517 Fix PerformanceEvaluation 'column' parameter

2018-05-04 Thread apurtell
HBASE-20517 Fix PerformanceEvaluation 'column' parameter


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fd24083e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fd24083e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fd24083e

Branch: refs/heads/branch-1
Commit: fd24083e26a2d49e30009bb2b3b26f0095f99c77
Parents: e793e7c
Author: Andrew Purtell 
Authored: Thu May 3 16:27:23 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 17:24:38 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 31 +++-
 1 file changed, 24 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fd24083e/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 85d3613..60d6cb6 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -135,7 +135,6 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   public static final String TABLE_NAME = "TestTable";
   public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
   public static final byte [] COLUMN_ZERO = Bytes.toBytes("" + 0);
-  public static final byte [] QUALIFIER_NAME = COLUMN_ZERO;
   public static final int DEFAULT_VALUE_LENGTH = 1000;
   public static final int ROW_LENGTH = 26;
 
@@ -1207,7 +1206,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   scan.setCaching(opts.caching);
   FilterList list = new FilterList();
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1245,7 +1247,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 scan.setFilter(new FilterAllFilter());
   }
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1345,7 +1350,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   }
   Get get = new Get(getRandomRow(this.rand, opts.totalRows));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 get.addFamily(FAMILY_NAME);
   }
@@ -1440,7 +1448,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 Scan scan = new Scan(format(opts.startRow));
 scan.setCaching(opts.caching);
 if (opts.addColumns) {
-  scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+  for (int column = 0; column < opts.columns; column++) {
+byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+scan.addColumn(FAMILY_NAME, qualifier);
+  }
 } else {
   scan.addFamily(FAMILY_NAME);
 }
@@ -1573,7 +1584,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 void testRow(final int i) throws IOException {
   Get get = new Get(format(i));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   }
   if (opts.filterAll) {
 get.setFilter(new FilterAllFilter());
@@ -1653,7 +1667,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   Scan scan = new Scan();
   scan.setCaching(opts.caching);
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  

[1/7] hbase git commit: HBASE-20517 Fix PerformanceEvaluation 'column' parameter

2018-05-04 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 e793e7c30 -> fd24083e2
  refs/heads/branch-1.2 1519dc05b -> a275e8631
  refs/heads/branch-1.3 62a3434b9 -> b6bb52110
  refs/heads/branch-1.4 f6dd195d0 -> 489120afb
  refs/heads/branch-2 de71cb591 -> 7df8e5e4a
  refs/heads/branch-2.0 d1c729b5c -> 3c4fadae4
  refs/heads/master 87f5b5f34 -> 78ffd7ace


HBASE-20517 Fix PerformanceEvaluation 'column' parameter


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/78ffd7ac
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/78ffd7ac
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/78ffd7ac

Branch: refs/heads/master
Commit: 78ffd7ace61d983e42c6766fb743831a6ce2fc62
Parents: 87f5b5f
Author: Andrew Purtell 
Authored: Thu May 3 16:25:17 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 17:24:17 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 46 +++-
 1 file changed, 36 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/78ffd7ac/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 5a63ef4..25a1d3c 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -139,7 +139,6 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   public static final String TABLE_NAME = "TestTable";
   public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
   public static final byte [] COLUMN_ZERO = Bytes.toBytes("" + 0);
-  public static final byte [] QUALIFIER_NAME = COLUMN_ZERO;
   public static final int DEFAULT_VALUE_LENGTH = 1000;
   public static final int ROW_LENGTH = 26;
 
@@ -1341,7 +1340,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   }
   Get get = new Get(getRandomRow(this.rand, opts.totalRows));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 get.addFamily(FAMILY_NAME);
   }
@@ -1466,7 +1468,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 
.setCacheBlocks(opts.cacheBlocks).setAsyncPrefetch(opts.asyncPrefetch)
 .setReadType(opts.scanReadType);
 if (opts.addColumns) {
-  scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+  for (int column = 0; column < opts.columns; column++) {
+byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+scan.addColumn(FAMILY_NAME, qualifier);
+  }
 } else {
   scan.addFamily(FAMILY_NAME);
 }
@@ -1489,7 +1494,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 void testRow(final int i) throws IOException, InterruptedException {
   Get get = new Get(format(i));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   }
   if (opts.filterAll) {
 get.setFilter(new FilterAllFilter());
@@ -1572,7 +1580,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   .setAsyncPrefetch(opts.asyncPrefetch).setReadType(opts.scanReadType);
   FilterList list = new FilterList();
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1612,7 +1623,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 scan.setFilter(new FilterAllFilter());
   }
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, 

[3/7] hbase git commit: HBASE-20517 Fix PerformanceEvaluation 'column' parameter

2018-05-04 Thread apurtell
HBASE-20517 Fix PerformanceEvaluation 'column' parameter


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3c4fadae
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3c4fadae
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3c4fadae

Branch: refs/heads/branch-2.0
Commit: 3c4fadae4257ff5078d2cc247efc938ea0d350ee
Parents: d1c729b
Author: Andrew Purtell 
Authored: Thu May 3 16:25:17 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 17:24:30 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 46 +++-
 1 file changed, 36 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3c4fadae/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 95d363f..a0d5572 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -139,7 +139,6 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   public static final String TABLE_NAME = "TestTable";
   public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
   public static final byte [] COLUMN_ZERO = Bytes.toBytes("" + 0);
-  public static final byte [] QUALIFIER_NAME = COLUMN_ZERO;
   public static final int DEFAULT_VALUE_LENGTH = 1000;
   public static final int ROW_LENGTH = 26;
 
@@ -1341,7 +1340,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   }
   Get get = new Get(getRandomRow(this.rand, opts.totalRows));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 get.addFamily(FAMILY_NAME);
   }
@@ -1466,7 +1468,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 
.setCacheBlocks(opts.cacheBlocks).setAsyncPrefetch(opts.asyncPrefetch)
 .setReadType(opts.scanReadType);
 if (opts.addColumns) {
-  scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+  for (int column = 0; column < opts.columns; column++) {
+byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+scan.addColumn(FAMILY_NAME, qualifier);
+  }
 } else {
   scan.addFamily(FAMILY_NAME);
 }
@@ -1489,7 +1494,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 void testRow(final int i) throws IOException, InterruptedException {
   Get get = new Get(format(i));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   }
   if (opts.filterAll) {
 get.setFilter(new FilterAllFilter());
@@ -1572,7 +1580,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   .setAsyncPrefetch(opts.asyncPrefetch).setReadType(opts.scanReadType);
   FilterList list = new FilterList();
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1612,7 +1623,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 scan.setFilter(new FilterAllFilter());
   }
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1712,7 +1726,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   }
   Get get = new Get(getRandomRow(this.rand, opts.totalRows));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int 

[5/7] hbase git commit: HBASE-20517 Fix PerformanceEvaluation 'column' parameter

2018-05-04 Thread apurtell
HBASE-20517 Fix PerformanceEvaluation 'column' parameter


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/489120af
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/489120af
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/489120af

Branch: refs/heads/branch-1.4
Commit: 489120afb65f4587e06c270cc5fa746b87bd5df0
Parents: f6dd195
Author: Andrew Purtell 
Authored: Thu May 3 16:27:23 2018 -0700
Committer: Andrew Purtell 
Committed: Fri May 4 17:24:41 2018 -0700

--
 .../hadoop/hbase/PerformanceEvaluation.java | 31 +++-
 1 file changed, 24 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/489120af/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 85d3613..60d6cb6 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -135,7 +135,6 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   public static final String TABLE_NAME = "TestTable";
   public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
   public static final byte [] COLUMN_ZERO = Bytes.toBytes("" + 0);
-  public static final byte [] QUALIFIER_NAME = COLUMN_ZERO;
   public static final int DEFAULT_VALUE_LENGTH = 1000;
   public static final int ROW_LENGTH = 26;
 
@@ -1207,7 +1206,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   scan.setCaching(opts.caching);
   FilterList list = new FilterList();
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1245,7 +1247,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 scan.setFilter(new FilterAllFilter());
   }
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  scan.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 scan.addFamily(FAMILY_NAME);
   }
@@ -1345,7 +1350,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   }
   Get get = new Get(getRandomRow(this.rand, opts.totalRows));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   } else {
 get.addFamily(FAMILY_NAME);
   }
@@ -1440,7 +1448,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 Scan scan = new Scan(format(opts.startRow));
 scan.setCaching(opts.caching);
 if (opts.addColumns) {
-  scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+  for (int column = 0; column < opts.columns; column++) {
+byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+scan.addColumn(FAMILY_NAME, qualifier);
+  }
 } else {
   scan.addFamily(FAMILY_NAME);
 }
@@ -1573,7 +1584,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 void testRow(final int i) throws IOException {
   Get get = new Get(format(i));
   if (opts.addColumns) {
-get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);
+  get.addColumn(FAMILY_NAME, qualifier);
+}
   }
   if (opts.filterAll) {
 get.setFilter(new FilterAllFilter());
@@ -1653,7 +1667,10 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   Scan scan = new Scan();
   scan.setCaching(opts.caching);
   if (opts.addColumns) {
-scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+for (int column = 0; column < opts.columns; column++) {
+  byte [] qualifier = column == 0? COLUMN_ZERO: Bytes.toBytes("" + 
column);

[48/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index f6fc79b..a4ab1b7 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -238,8 +238,8 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader.WALHdrResult
 org.apache.hadoop.hbase.regionserver.wal.RingBufferTruck.Type
+org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader.WALHdrResult
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.html 
b/devapidocs/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.html
index d39649d..bf00494 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.html
@@ -361,7 +361,7 @@ implements Parameters:
 context - replication context
 Throws:
-https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException - error occur 
when initialize the endpoint.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/replication/ReplicationEndpoint.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/ReplicationEndpoint.html 
b/devapidocs/org/apache/hadoop/hbase/replication/ReplicationEndpoint.html
index 8ae5a0c..9c5f017 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/ReplicationEndpoint.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/ReplicationEndpoint.html
@@ -288,7 +288,7 @@ extends Parameters:
 context - replication context
 Throws:
-https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException - error occur 
when initialize the endpoint.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationEndpoint.ReplicateContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationEndpoint.ReplicateContext.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationEndpoint.ReplicateContext.html
index 81f9b08..3d5e841 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationEndpoint.ReplicateContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/class-use/ReplicationEndpoint.ReplicateContext.html
@@ -154,10 +154,16 @@
 
 
 
+private long
+HBaseInterClusterReplicationEndpoint.parallelReplicate(https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionService.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletionServicehttps://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integerpool,
+ ReplicationEndpoint.ReplicateContextreplicateContext,
+ https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entrybatches)
+
+
 boolean
 RegionReplicaReplicationEndpoint.replicate(ReplicationEndpoint.ReplicateContextreplicateContext)
 
-
+
 boolean
 HBaseInterClusterReplicationEndpoint.replicate(ReplicationEndpoint.ReplicateContextreplicateContext)
 Do the shipping logic


[42/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html 
b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
index 2df47c7..05289a3 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":9,"i22":10,"i23":9,"i24":9,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":9,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":9,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":9,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":9,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":9,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":9,"
 
i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":9,"i23":10,"i24":9,"i25":9,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":9,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":9,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":9,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":9,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":9,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":10,
 
"i110":10,"i111":10,"i112":9,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
  @InterfaceStability.Evolving
-public class HBaseFsck
+public class HBaseFsck
 extends org.apache.hadoop.conf.Configured
 implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true;
 title="class or interface in java.io">Closeable
 HBaseFsck (hbck) is a tool for checking and repairing 
region consistency and
@@ -303,6 +303,14 @@ implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 checkRegionBoundaries
 
 
+private boolean
+cleanReplicationBarrier
+
+
+private TableName
+cleanReplicationBarrierTable
+
+
 (package private) static https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">ComparatorHBaseFsck.HbckInfo
 cmp
 
@@ -703,59 +711,63 @@ implements https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 
 
 
+void
+cleanReplicationBarrier()
+
+
 private void
 cleanupHbckZnode()
 
-
+
 private void
 clearState()
 Clear the current state of hbck.
 
 
-
+
 void
 close()
 
-
+
 private void
 closeRegion(HBaseFsck.HbckInfohi)
 Attempts to undeploy a region from a region server based in 
information in
  META.
 
 
-
+
 void
 connect()
 To repair region consistency, one must call connect() in 
order to repair
  online state.
 
 
-
+
 protected HFileCorruptionChecker
 createHFileCorruptionChecker(booleansidelineCorruptHFiles)
 
-
+
 private HRegion
 createNewMeta(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringwalFactoryID)
 This borrows code from 

[30/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 

[39/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.html
index d5f3932..166de59 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.html
@@ -32,9 +32,9 @@
 024import java.net.UnknownHostException;
 025import java.util.ArrayList;
 026import java.util.Collections;
-027import java.util.HashMap;
-028import java.util.List;
-029import java.util.Map;
+027import java.util.List;
+028import java.util.Map;
+029import java.util.TreeMap;
 030import java.util.concurrent.Callable;
 031import 
java.util.concurrent.CompletionService;
 032import 
java.util.concurrent.ExecutionException;
@@ -45,473 +45,494 @@
 037import java.util.concurrent.TimeUnit;
 038import java.util.regex.Matcher;
 039import java.util.regex.Pattern;
-040import 
org.apache.commons.lang3.StringUtils;
-041import 
org.apache.hadoop.conf.Configuration;
-042import org.apache.hadoop.fs.Path;
-043import 
org.apache.hadoop.hbase.Abortable;
-044import 
org.apache.hadoop.hbase.HBaseConfiguration;
-045import 
org.apache.hadoop.hbase.HConstants;
-046import 
org.apache.hadoop.hbase.TableName;
-047import 
org.apache.hadoop.hbase.TableNotFoundException;
-048import 
org.apache.hadoop.hbase.client.ClusterConnection;
-049import 
org.apache.hadoop.hbase.client.Connection;
-050import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-051import 
org.apache.hadoop.hbase.ipc.RpcServer;
-052import 
org.apache.hadoop.hbase.protobuf.ReplicationProtbufUtil;
-053import 
org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint;
-054import 
org.apache.hadoop.hbase.replication.regionserver.ReplicationSinkManager.SinkPeer;
-055import 
org.apache.hadoop.hbase.util.Bytes;
-056import 
org.apache.hadoop.hbase.util.FSUtils;
-057import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-058import 
org.apache.hadoop.ipc.RemoteException;
-059import 
org.apache.yetus.audience.InterfaceAudience;
-060import org.slf4j.Logger;
-061import org.slf4j.LoggerFactory;
-062
-063import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-064
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
-066
-067/**
-068 * A {@link 
org.apache.hadoop.hbase.replication.ReplicationEndpoint}
-069 * implementation for replicating to 
another HBase cluster.
-070 * For the slave cluster it selects a 
random number of peers
-071 * using a replication ratio. For 
example, if replication ration = 0.1
-072 * and slave cluster has 100 region 
servers, 10 will be selected.
-073 * p
-074 * A stream is considered down when we 
cannot contact a region server on the
-075 * peer cluster for more than 55 seconds 
by default.
-076 * /p
-077 */
-078@InterfaceAudience.Private
-079public class 
HBaseInterClusterReplicationEndpoint extends HBaseReplicationEndpoint {
-080  private static final Logger LOG =
-081  
LoggerFactory.getLogger(HBaseInterClusterReplicationEndpoint.class);
-082
-083  private static final long 
DEFAULT_MAX_TERMINATION_WAIT_MULTIPLIER = 2;
-084
-085  private ClusterConnection conn;
-086  private Configuration localConf;
-087  private Configuration conf;
-088  // How long should we sleep for each 
retry
-089  private long sleepForRetries;
-090  // Maximum number of retries before 
taking bold actions
-091  private int maxRetriesMultiplier;
-092  // Socket timeouts require even bolder 
actions since we don't want to DDOS
-093  private int socketTimeoutMultiplier;
-094  // Amount of time for shutdown to wait 
for all tasks to complete
-095  private long maxTerminationWait;
-096  // Size limit for replication RPCs, in 
bytes
-097  private int replicationRpcLimit;
-098  //Metrics for this source
-099  private MetricsSource metrics;
-100  // Handles connecting to peer region 
servers
-101  private ReplicationSinkManager 
replicationSinkMgr;
-102  private boolean peersSelected = 
false;
-103  private String replicationClusterId = 
"";
-104  private ThreadPoolExecutor exec;
-105  private int maxThreads;
-106  private Path baseNamespaceDir;
-107  private Path hfileArchiveDir;
-108  private boolean 
replicationBulkLoadDataEnabled;
-109  private Abortable abortable;
-110  private boolean dropOnDeletedTables;
-111
-112  @Override
-113  public void init(Context context) 
throws IOException {
-114super.init(context);
-115this.conf = 
HBaseConfiguration.create(ctx.getConfiguration());
-116this.localConf = 

[11/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
 
b/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
index 0f850d3..82cd795 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -74,7 +74,7 @@ var activeTableTab = "activeTableTab";
 
 
 Summary:
-Nested|
+Nested|
 Field|
 Constr|
 Method
@@ -137,7 +137,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class TestReplicationEndpoint.InterClusterReplicationEndpointForTest
+public static class TestReplicationEndpoint.InterClusterReplicationEndpointForTest
 extends 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
 
 
@@ -151,28 +151,6 @@ extends 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplic
 
 
 Nested Class Summary
-
-Nested Classes
-
-Modifier and Type
-Class and Description
-
-
-protected class
-TestReplicationEndpoint.InterClusterReplicationEndpointForTest.DummyReplicator
-
-
-protected class
-TestReplicationEndpoint.InterClusterReplicationEndpointForTest.FailingDummyReplicator
-
-
-
-
-
-
-Nested classes/interfaces inherited from 
classorg.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
-org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint.Replicator
-
 
 
 
@@ -256,7 +234,7 @@ extends 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplic
 Method and Description
 
 
-protected 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint.Replicator
+protected https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true;
 title="class or interface in java.util.concurrent">Callablehttps://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
 createReplicator(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.wal.WAL.Entryentries,
 intordinal)
 
@@ -270,7 +248,7 @@ extends 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplic
 
 
 Methods inherited from 
classorg.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
-doStop, init, isPeerEnabled, sleepForRetries
+doStop, init, isPeerEnabled, replicateEntries, 
sleepForRetries
 
 
 
@@ -327,7 +305,7 @@ extends 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplic
 
 
 replicateCount
-statichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger replicateCount
+statichttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger replicateCount
 
 
 
@@ -336,7 +314,7 @@ extends 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplic
 
 
 failedOnce
-staticboolean failedOnce
+staticboolean failedOnce
 
 
 
@@ -353,7 +331,7 @@ extends 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplic
 
 
 InterClusterReplicationEndpointForTest
-publicInterClusterReplicationEndpointForTest()
+publicInterClusterReplicationEndpointForTest()
 
 
 
@@ -370,7 +348,7 @@ extends 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplic
 
 
 replicate
-publicbooleanreplicate(org.apache.hadoop.hbase.replication.ReplicationEndpoint.ReplicateContextreplicateContext)
+publicbooleanreplicate(org.apache.hadoop.hbase.replication.ReplicationEndpoint.ReplicateContextreplicateContext)
 
 Specified by:
 replicatein 
interfaceorg.apache.hadoop.hbase.replication.ReplicationEndpoint
@@ -385,8 +363,8 @@ extends 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplic
 
 
 createReplicator
-protectedorg.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint.ReplicatorcreateReplicator(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.wal.WAL.Entryentries,
-   

[19/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 

[10/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface.html
 
b/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface.html
deleted file mode 100644
index adbfc60..000
--- 
a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface.html
+++ /dev/null
@@ -1,757 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-
-
-
-TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
 (Apache HBase 3.0.0-SNAPSHOT Test API)
-
-
-
-
-
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-PrevClass
-NextClass
-
-
-Frames
-NoFrames
-
-
-AllClasses
-
-
-
-
-
-
-
-Summary:
-Nested|
-Field|
-Constr|
-Method
-
-
-Detail:
-Field|
-Constr|
-Method
-
-
-
-
-
-
-
-
-org.apache.hadoop.hbase.replication.regionserver
-Class 
TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
-
-
-
-https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
-
-
-org.apache.hadoop.hbase.replication.regionserver.TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
-
-
-
-
-
-
-
-All Implemented Interfaces:
-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-
-
-Enclosing class:
-TestReplicator.FailureInjectingReplicationEndpointForTest
-
-
-
-static class TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
-extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-implements 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-
-
-
-
-
-
-
-
-
-
-
-Field Summary
-
-Fields
-
-Modifier and Type
-Field and Description
-
-
-private 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-delegate
-
-
-private boolean
-failNext
-
-
-
-
-
-
-
-
-
-Constructor Summary
-
-Constructors
-
-Constructor and Description
-
-
-FailureInjectingBlockingInterface(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterfacedelegate)
-
-
-
-
-
-
-
-
-
-Method Summary
-
-All MethodsInstance MethodsConcrete Methods
-
-Modifier and Type
-Method and Description
-
-
-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesResponse
-clearCompactionQueues(org.apache.hbase.thirdparty.com.google.protobuf.RpcControllercontroller,
- 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequestrequest)
-
-
-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheResponse
-clearRegionBlockCache(org.apache.hbase.thirdparty.com.google.protobuf.RpcControllercontroller,
- 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheRequestrequest)
-
-
-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse
-closeRegion(org.apache.hbase.thirdparty.com.google.protobuf.RpcControllercontroller,
-   
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequestrequest)
-
-
-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse

[44/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html 
b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
index e1c2cca..785b6d1 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class HBaseFsck.HbckInfo
+public static class HBaseFsck.HbckInfo
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements KeyRange
 Maintain information about a particular region.  It gathers 
information
@@ -305,7 +305,7 @@ implements 
 
 metaEntry
-privateHBaseFsck.MetaEntry metaEntry
+privateHBaseFsck.MetaEntry metaEntry
 
 
 
@@ -314,7 +314,7 @@ implements 
 
 hdfsEntry
-privateHBaseFsck.HdfsEntry hdfsEntry
+privateHBaseFsck.HdfsEntry hdfsEntry
 
 
 
@@ -323,7 +323,7 @@ implements 
 
 deployedEntries
-privatehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHBaseFsck.OnlineEntry deployedEntries
+privatehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHBaseFsck.OnlineEntry deployedEntries
 
 
 
@@ -332,7 +332,7 @@ implements 
 
 deployedOn
-privatehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName deployedOn
+privatehttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName deployedOn
 
 
 
@@ -341,7 +341,7 @@ implements 
 
 skipChecks
-privateboolean skipChecks
+privateboolean skipChecks
 
 
 
@@ -350,7 +350,7 @@ implements 
 
 isMerged
-privateboolean isMerged
+privateboolean isMerged
 
 
 
@@ -359,7 +359,7 @@ implements 
 
 deployedReplicaId
-privateint deployedReplicaId
+privateint deployedReplicaId
 
 
 
@@ -368,7 +368,7 @@ implements 
 
 primaryHRIForDeployedReplica
-privateRegionInfo primaryHRIForDeployedReplica
+privateRegionInfo primaryHRIForDeployedReplica
 
 
 
@@ -385,7 +385,7 @@ implements 
 
 HbckInfo
-HbckInfo(HBaseFsck.MetaEntrymetaEntry)
+HbckInfo(HBaseFsck.MetaEntrymetaEntry)
 
 
 
@@ -402,7 +402,7 @@ implements 
 
 getReplicaId
-publicintgetReplicaId()
+publicintgetReplicaId()
 
 
 
@@ -411,7 +411,7 @@ implements 
 
 addServer
-publicvoidaddServer(RegionInfohri,
+publicvoidaddServer(RegionInfohri,
   ServerNameserver)
 
 
@@ -421,7 +421,7 @@ implements 
 
 toString
-publichttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
+publichttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
 
 Overrides:
 https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
 title="class or interface in java.lang">toStringin 
classhttps://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
@@ -434,7 +434,7 @@ implements 
 
 getStartKey
-publicbyte[]getStartKey()
+publicbyte[]getStartKey()
 
 Specified by:
 getStartKeyin
 interfaceKeyRange
@@ -447,7 +447,7 @@ implements 
 
 getEndKey
-publicbyte[]getEndKey()
+publicbyte[]getEndKey()
 
 Specified by:
 getEndKeyin
 interfaceKeyRange
@@ -460,7 +460,7 @@ implements 
 
 getTableName
-publicTableNamegetTableName()
+publicTableNamegetTableName()
 
 
 
@@ -469,7 +469,7 @@ implements 
 
 getRegionNameAsString
-publichttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetRegionNameAsString()
+publichttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetRegionNameAsString()
 
 
 
@@ -478,7 +478,7 @@ implements 
 
 getRegionName
-publicbyte[]getRegionName()
+publicbyte[]getRegionName()
 
 
 
@@ -487,7 +487,7 @@ implements 
 
 getPrimaryHRIForDeployedReplica
-publicRegionInfogetPrimaryHRIForDeployedReplica()
+publicRegionInfogetPrimaryHRIForDeployedReplica()
 
 
 
@@ -496,7 +496,7 @@ implements 
 
 getHdfsRegionDir
-org.apache.hadoop.fs.PathgetHdfsRegionDir()
+org.apache.hadoop.fs.PathgetHdfsRegionDir()
 
 
 
@@ -505,7 +505,7 @@ implements 
 
 containsOnlyHdfsEdits
-booleancontainsOnlyHdfsEdits()
+booleancontainsOnlyHdfsEdits()
 
 
 
@@ -514,7 +514,7 @@ implements 
 
 isHdfsRegioninfoPresent
-booleanisHdfsRegioninfoPresent()
+booleanisHdfsRegioninfoPresent()
 
 
 
@@ -523,7 +523,7 @@ implements 
 
 getModTime
-longgetModTime()

[27/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
index e1bc325..63e7421 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 
org.apache.hadoop.security.UserGroupInformation;
-136import 

[40/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.Replicator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.Replicator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.Replicator.html
deleted file mode 100644
index d5f3932..000
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.Replicator.html
+++ /dev/null
@@ -1,578 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/**
-002 * Licensed to the Apache Software 
Foundation (ASF) under one
-003 * or more contributor license 
agreements.  See the NOTICE file
-004 * distributed with this work for 
additional information
-005 * regarding copyright ownership.  The 
ASF licenses this file
-006 * to you under the Apache License, 
Version 2.0 (the
-007 * "License"); you may not use this file 
except in compliance
-008 * with the License.  You may obtain a 
copy of the License at
-009 *
-010 * 
http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or 
agreed to in writing, software
-013 * distributed under the License is 
distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-015 * See the License for the specific 
language governing permissions and
-016 * limitations under the License.
-017 */
-018
-019package 
org.apache.hadoop.hbase.replication.regionserver;
-020
-021import java.io.IOException;
-022import java.net.ConnectException;
-023import java.net.SocketTimeoutException;
-024import java.net.UnknownHostException;
-025import java.util.ArrayList;
-026import java.util.Collections;
-027import java.util.HashMap;
-028import java.util.List;
-029import java.util.Map;
-030import java.util.concurrent.Callable;
-031import 
java.util.concurrent.CompletionService;
-032import 
java.util.concurrent.ExecutionException;
-033import 
java.util.concurrent.ExecutorCompletionService;
-034import java.util.concurrent.Future;
-035import 
java.util.concurrent.LinkedBlockingQueue;
-036import 
java.util.concurrent.ThreadPoolExecutor;
-037import java.util.concurrent.TimeUnit;
-038import java.util.regex.Matcher;
-039import java.util.regex.Pattern;
-040import 
org.apache.commons.lang3.StringUtils;
-041import 
org.apache.hadoop.conf.Configuration;
-042import org.apache.hadoop.fs.Path;
-043import 
org.apache.hadoop.hbase.Abortable;
-044import 
org.apache.hadoop.hbase.HBaseConfiguration;
-045import 
org.apache.hadoop.hbase.HConstants;
-046import 
org.apache.hadoop.hbase.TableName;
-047import 
org.apache.hadoop.hbase.TableNotFoundException;
-048import 
org.apache.hadoop.hbase.client.ClusterConnection;
-049import 
org.apache.hadoop.hbase.client.Connection;
-050import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-051import 
org.apache.hadoop.hbase.ipc.RpcServer;
-052import 
org.apache.hadoop.hbase.protobuf.ReplicationProtbufUtil;
-053import 
org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint;
-054import 
org.apache.hadoop.hbase.replication.regionserver.ReplicationSinkManager.SinkPeer;
-055import 
org.apache.hadoop.hbase.util.Bytes;
-056import 
org.apache.hadoop.hbase.util.FSUtils;
-057import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-058import 
org.apache.hadoop.ipc.RemoteException;
-059import 
org.apache.yetus.audience.InterfaceAudience;
-060import org.slf4j.Logger;
-061import org.slf4j.LoggerFactory;
-062
-063import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-064
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
-066
-067/**
-068 * A {@link 
org.apache.hadoop.hbase.replication.ReplicationEndpoint}
-069 * implementation for replicating to 
another HBase cluster.
-070 * For the slave cluster it selects a 
random number of peers
-071 * using a replication ratio. For 
example, if replication ration = 0.1
-072 * and slave cluster has 100 region 
servers, 10 will be selected.
-073 * p
-074 * A stream is considered down when we 
cannot contact a region server on the
-075 * peer cluster for more than 55 seconds 
by default.
-076 * /p
-077 */
-078@InterfaceAudience.Private
-079public class 
HBaseInterClusterReplicationEndpoint extends HBaseReplicationEndpoint {
-080  private static final Logger LOG =
-081  
LoggerFactory.getLogger(HBaseInterClusterReplicationEndpoint.class);
-082
-083  private static final long 
DEFAULT_MAX_TERMINATION_WAIT_MULTIPLIER = 2;
-084
-085  private ClusterConnection conn;
-086  private Configuration localConf;
-087  private Configuration conf;
-088  // How long should we sleep for each 
retry
-089  private long sleepForRetries;
-090  // Maximum number 

[46/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
index a3f0da3..af0be80 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-class ReplicationSourceManager.NodeFailoverWorker
+class ReplicationSourceManager.NodeFailoverWorker
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true;
 title="class or interface in java.lang">Thread
 Class responsible to setup new ReplicationSources to take 
care of the queues from dead region
  servers.
@@ -250,7 +250,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 
 
 deadRS
-private finalServerName deadRS
+private finalServerName deadRS
 
 
 
@@ -259,7 +259,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 
 
 peersSnapshot
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,ReplicationPeerImpl peersSnapshot
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,ReplicationPeerImpl peersSnapshot
 
 
 
@@ -276,7 +276,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 
 
 NodeFailoverWorker
-publicNodeFailoverWorker(ServerNamedeadRS)
+publicNodeFailoverWorker(ServerNamedeadRS)
 
 
 
@@ -293,7 +293,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 
 
 isOldPeer
-privatebooleanisOldPeer(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId,
+privatebooleanisOldPeer(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId,
   ReplicationPeerImplnewPeerRef)
 
 
@@ -303,7 +303,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 
 
 run
-publicvoidrun()
+publicvoidrun()
 
 Specified by:
 https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--;
 title="class or interface in java.lang">runin 
interfacehttps://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
index db0500a..416b1e7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 https://docs.oracle.com/javase/8/docs/api/java/lang/FunctionalInterface.html?is-external=true;
 title="class or interface in java.lang">@FunctionalInterface
-private static interface ReplicationSourceManager.ReplicationQueueOperation
+private static interface ReplicationSourceManager.ReplicationQueueOperation
 
 
 
@@ -154,7 +154,7 @@ private static interface 
 
 exec
-voidexec()
+voidexec()
throws ReplicationException
 
 Throws:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
index 7a257e4..be50be5 100644
--- 

[47/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.html
index 9a3d7b2..c2b5c86 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -74,7 +74,7 @@ var activeTableTab = "activeTableTab";
 
 
 Summary:
-Nested|
+Nested|
 Field|
 Constr|
 Method
@@ -129,7 +129,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class HBaseInterClusterReplicationEndpoint
+public class HBaseInterClusterReplicationEndpoint
 extends HBaseReplicationEndpoint
 A ReplicationEndpoint
  implementation for replicating to another HBase cluster.
@@ -152,17 +152,6 @@ extends 
-Nested Classes
-
-Modifier and Type
-Class and Description
-
-
-protected class
-HBaseInterClusterReplicationEndpoint.Replicator
-
-
 
 
 
@@ -231,54 +220,58 @@ extends hfileArchiveDir
 
 
+private boolean
+isSerial
+
+
 private 
org.apache.hadoop.conf.Configuration
 localConf
 
-
+
 private static org.slf4j.Logger
 LOG
 
-
+
 private int
 maxRetriesMultiplier
 
-
+
 private long
 maxTerminationWait
 
-
+
 private int
 maxThreads
 
-
+
 private MetricsSource
 metrics
 
-
+
 private boolean
 peersSelected
 
-
+
 private boolean
 replicationBulkLoadDataEnabled
 
-
+
 private https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 replicationClusterId
 
-
+
 private int
 replicationRpcLimit
 
-
+
 private ReplicationSinkManager
 replicationSinkMgr
 
-
+
 private long
 sleepForRetries
 
-
+
 private int
 socketTimeoutMultiplier
 
@@ -327,51 +320,82 @@ extends 
 private https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entry
-createBatches(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entryentries)
+createBatches(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entryentries)
+Divide the entries into multiple batches, so that we can 
replicate each batch in a thread pool
+ concurrently.
+
 
 
-protected HBaseInterClusterReplicationEndpoint.Replicator
-createReplicator(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entryentries,
-intordinal)
+private https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entry
+createParallelBatches(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entryentries)
 
 
+protected https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true;
 title="class or interface in java.util.concurrent">Callablehttps://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
+createReplicator(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entryentries,
+intbatchIndex)
+
+
+private https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entry
+createSerialBatches(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 

[50/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index eab9d19..2e471d7 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -281,10 +281,10 @@
 Warnings
 Errors
 
-3607
+3609
 0
 0
-15867
+15864
 
 Files
 
@@ -7937,7 +7937,7 @@
 org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
 0
 0
-5
+4
 
 org/apache/hadoop/hbase/replication/ReplicationException.java
 0
@@ -8024,2186 +8024,2181 @@
 0
 6
 
-org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
-0
-0
-1
-
 org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSourceImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/replication/regionserver/MetricsSink.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/replication/regionserver/Replication.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/replication/regionserver/ReplicationLoad.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
 0
 0
 15
-
+
 org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceInterface.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/replication/regionserver/ReplicationThrottler.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/replication/regionserver/TestMetricsReplicationSourceFactoryImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/replication/regionserver/TestMetricsReplicationSourceImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/replication/regionserver/TestReplicationSource.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java
 0
 0
 16
-
+
 org/apache/hadoop/hbase/replication/regionserver/TestWALEntryStream.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/replication/regionserver/WALEntrySinkFilter.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/rest/ExistsResource.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
 0
 0
 14
-
+
 org/apache/hadoop/hbase/rest/MetricsREST.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/rest/MetricsRESTSourceImpl.java
 0
 0
 12
-
+
 org/apache/hadoop/hbase/rest/MultiRowResource.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/rest/NamespacesResource.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/rest/ProtobufMessageHandler.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/rest/RESTServer.java
 0
 0
 16
-
+
 org/apache/hadoop/hbase/rest/RESTServlet.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/rest/RESTServletContainer.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/rest/RegionsResource.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/rest/ResourceBase.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/rest/ResultGenerator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/rest/RootResource.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/rest/RowResource.java
 0
 0
 60
-
+
 org/apache/hadoop/hbase/rest/RowResourceBase.java
 0
 0
 15
-
+
 org/apache/hadoop/hbase/rest/RowSpec.java
 0
 0
 32
-
+
 org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/rest/ScannerResource.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/rest/SchemaResource.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/rest/TableResource.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/rest/TableScanResource.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/rest/TestGetAndPutResource.java
 0
 0
 4
-
+
 

hbase-site git commit: INFRA-10751 Empty commit

2018-05-04 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site de18d4687 -> 64bc8d859


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/64bc8d85
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/64bc8d85
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/64bc8d85

Branch: refs/heads/asf-site
Commit: 64bc8d8595e3ecf19a3b39c69f33ea96d4d03f23
Parents: de18d46
Author: jenkins 
Authored: Fri May 4 14:48:03 2018 +
Committer: jenkins 
Committed: Fri May 4 14:48:03 2018 +

--

--




[41/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 59c95fc..bf2f539 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -532,14 +532,14 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.util.PoolMap.PoolType
-org.apache.hadoop.hbase.util.Order
-org.apache.hadoop.hbase.util.ChecksumType
+org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE
 org.apache.hadoop.hbase.util.PrettyPrinter.Unit
+org.apache.hadoop.hbase.util.ChecksumType
 org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType
-org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE
-org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.UnsafeComparer
 (implements org.apache.hadoop.hbase.util.Bytes.ComparerT)
+org.apache.hadoop.hbase.util.Order
 org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.PureJavaComparer
 (implements org.apache.hadoop.hbase.util.Bytes.ComparerT)
+org.apache.hadoop.hbase.util.PoolMap.PoolType
+org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.UnsafeComparer
 (implements org.apache.hadoop.hbase.util.Bytes.ComparerT)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/wal/class-use/WAL.Entry.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/class-use/WAL.Entry.html 
b/devapidocs/org/apache/hadoop/hbase/wal/class-use/WAL.Entry.html
index f862989..296589b 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/class-use/WAL.Entry.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/class-use/WAL.Entry.html
@@ -370,10 +370,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entry
-HBaseInterClusterReplicationEndpoint.Replicator.entries
-
-
-private https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entry
 WALEntryBatch.walEntries
 
 
@@ -412,7 +408,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entry
-HBaseInterClusterReplicationEndpoint.createBatches(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entryentries)
+HBaseInterClusterReplicationEndpoint.createBatches(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entryentries)
+Divide the entries into multiple batches, so that we can 
replicate each batch in a thread pool
+ concurrently.
+
+
+
+private https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entry
+HBaseInterClusterReplicationEndpoint.createParallelBatches(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entryentries)
+
+
+private https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entry
+HBaseInterClusterReplicationEndpoint.createSerialBatches(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entryentries)
 
 
 private https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListWAL.Entry
@@ -460,16 +467,20 @@ 

[03/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.DummyReplicator.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.DummyReplicator.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.DummyReplicator.html
deleted file mode 100644
index 7a938de..000
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.DummyReplicator.html
+++ /dev/null
@@ -1,632 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/**
-002 * Licensed to the Apache Software 
Foundation (ASF) under one
-003 * or more contributor license 
agreements.  See the NOTICE file
-004 * distributed with this work for 
additional information
-005 * regarding copyright ownership.  The 
ASF licenses this file
-006 * to you under the Apache License, 
Version 2.0 (the
-007 * "License"); you may not use this file 
except in compliance
-008 * with the License.  You may obtain a 
copy of the License at
-009 *
-010 * 
http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or 
agreed to in writing, software
-013 * distributed under the License is 
distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-015 * See the License for the specific 
language governing permissions and
-016 * limitations under the License.
-017 */
-018package 
org.apache.hadoop.hbase.replication;
-019
-020import static org.mockito.Mockito.mock;
-021import static 
org.mockito.Mockito.verify;
-022import static org.mockito.Mockito.when;
-023
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.List;
-027import java.util.UUID;
-028import 
java.util.concurrent.atomic.AtomicBoolean;
-029import 
java.util.concurrent.atomic.AtomicInteger;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import org.apache.hadoop.hbase.Cell;
-032import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-033import org.apache.hadoop.hbase.Waiter;
-034import 
org.apache.hadoop.hbase.client.Connection;
-035import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-036import 
org.apache.hadoop.hbase.client.Put;
-037import 
org.apache.hadoop.hbase.client.RegionInfo;
-038import 
org.apache.hadoop.hbase.client.Table;
-039import 
org.apache.hadoop.hbase.regionserver.HRegion;
-040import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
-041import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
-042import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
-043import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;
-044import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl;
-045import 
org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
-046import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-047import 
org.apache.hadoop.hbase.testclassification.ReplicationTests;
-048import 
org.apache.hadoop.hbase.util.Bytes;
-049import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-050import 
org.apache.hadoop.hbase.util.Threads;
-051import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-052import 
org.apache.hadoop.hbase.zookeeper.ZKConfig;
-053import 
org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
-054import org.junit.AfterClass;
-055import org.junit.Assert;
-056import org.junit.Before;
-057import org.junit.BeforeClass;
-058import org.junit.ClassRule;
-059import org.junit.Test;
-060import 
org.junit.experimental.categories.Category;
-061import org.slf4j.Logger;
-062import org.slf4j.LoggerFactory;
-063
-064/**
-065 * Tests ReplicationSource and 
ReplicationEndpoint interactions
-066 */
-067@Category({ ReplicationTests.class, 
MediumTests.class })
-068public class TestReplicationEndpoint 
extends TestReplicationBase {
-069
-070  @ClassRule
-071  public static final HBaseClassTestRule 
CLASS_RULE =
-072  
HBaseClassTestRule.forClass(TestReplicationEndpoint.class);
-073
-074  private static final Logger LOG = 
LoggerFactory.getLogger(TestReplicationEndpoint.class);
-075
-076  static int numRegionServers;
-077
-078  @BeforeClass
-079  public static void setUpBeforeClass() 
throws Exception {
-080
TestReplicationBase.setUpBeforeClass();
-081numRegionServers = 
utility1.getHBaseCluster().getRegionServerThreads().size();
-082  }
-083
-084  @AfterClass
-085  public static void tearDownAfterClass() 
throws Exception {
-086
TestReplicationBase.tearDownAfterClass();
-087// check stop is called
-088

[35/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
index c282308..094b592 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.html
@@ -426,483 +426,481 @@
 418   */
 419  void 
removeRecoveredSource(ReplicationSourceInterface src) {
 420LOG.info("Done with the recovered 
queue " + src.getQueueId());
-421src.getSourceMetrics().clear();
-422this.oldsources.remove(src);
-423// Delete queue from storage and 
memory
-424deleteQueue(src.getQueueId());
-425
this.walsByIdRecoveredQueues.remove(src.getQueueId());
-426  }
-427
-428  /**
-429   * Clear the metrics and related 
replication queue of the specified old source
-430   * @param src source to clear
-431   */
-432  void 
removeSource(ReplicationSourceInterface src) {
-433LOG.info("Done with the queue " + 
src.getQueueId());
-434src.getSourceMetrics().clear();
-435
this.sources.remove(src.getPeerId());
-436// Delete queue from storage and 
memory
-437deleteQueue(src.getQueueId());
-438
this.walsById.remove(src.getQueueId());
-439  }
-440
-441  /**
-442   * Delete a complete queue of wals 
associated with a replication source
-443   * @param queueId the id of replication 
queue to delete
-444   */
-445  private void deleteQueue(String 
queueId) {
-446abortWhenFail(() - 
this.queueStorage.removeQueue(server.getServerName(), queueId));
-447  }
-448
-449  @FunctionalInterface
-450  private interface 
ReplicationQueueOperation {
-451void exec() throws 
ReplicationException;
-452  }
-453
-454  private void 
abortWhenFail(ReplicationQueueOperation op) {
-455try {
-456  op.exec();
-457} catch (ReplicationException e) {
-458  server.abort("Failed to operate on 
replication queue", e);
-459}
-460  }
-461
-462  private void 
throwIOExceptionWhenFail(ReplicationQueueOperation op) throws IOException {
-463try {
-464  op.exec();
-465} catch (ReplicationException e) {
-466  throw new IOException(e);
-467}
-468  }
-469
-470  private void 
abortAndThrowIOExceptionWhenFail(ReplicationQueueOperation op) throws 
IOException {
-471try {
-472  op.exec();
-473} catch (ReplicationException e) {
-474  server.abort("Failed to operate on 
replication queue", e);
-475  throw new IOException(e);
-476}
-477  }
-478
-479  /**
-480   * This method will log the current 
position to storage. And also clean old logs from the
-481   * replication queue.
-482   * @param queueId id of the replication 
queue
-483   * @param queueRecovered indicates if 
this queue comes from another region server
-484   * @param entryBatch the wal entry 
batch we just shipped
-485   */
-486  public void 
logPositionAndCleanOldLogs(String queueId, boolean queueRecovered,
-487  WALEntryBatch entryBatch) {
-488String fileName = 
entryBatch.getLastWalPath().getName();
-489abortWhenFail(() - 
this.queueStorage.setWALPosition(server.getServerName(), queueId, fileName,
-490  entryBatch.getLastWalPosition(), 
entryBatch.getLastSeqIds()));
-491cleanOldLogs(fileName, 
entryBatch.isEndOfFile(), queueId, queueRecovered);
-492  }
-493
-494  /**
-495   * Cleans a log file and all older logs 
from replication queue. Called when we are sure that a log
-496   * file is closed and has no more 
entries.
-497   * @param log Path to the log
-498   * @param inclusive whether we should 
also remove the given log file
-499   * @param queueId id of the replication 
queue
-500   * @param queueRecovered Whether this 
is a recovered queue
-501   */
-502  @VisibleForTesting
-503  void cleanOldLogs(String log, boolean 
inclusive, String queueId, boolean queueRecovered) {
-504String logPrefix = 
AbstractFSWALProvider.getWALPrefixFromWALName(log);
-505if (queueRecovered) {
-506  NavigableSetString wals = 
walsByIdRecoveredQueues.get(queueId).get(logPrefix);
-507  if (wals != null) {
-508cleanOldLogs(wals, log, 
inclusive, queueId);
-509  }
-510} else {
-511  // synchronized on walsById to 
avoid race with preLogRoll
-512  synchronized (this.walsById) {
-513NavigableSetString wals = 
walsById.get(queueId).get(logPrefix);
-514if (wals != null) {
-515  cleanOldLogs(wals, log, 
inclusive, queueId);
-516}
-517  }
-518}
-519  }
-520
-521  private void 
cleanOldLogs(NavigableSetString wals, String key, boolean inclusive, 
String id) {
-522NavigableSetString walSet = 

[20/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
index e1bc325..63e7421 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 
org.apache.hadoop.security.UserGroupInformation;
-136import 

[24/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 

[51/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/de18d468
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/de18d468
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/de18d468

Branch: refs/heads/asf-site
Commit: de18d46872487a6f1db9134c2b9dd348767134c3
Parents: a1e6bc2
Author: jenkins 
Authored: Fri May 4 14:47:40 2018 +
Committer: jenkins 
Committed: Fri May 4 14:47:40 2018 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 28332 -
 checkstyle.rss  |38 +-
 coc.html| 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/constant-values.html | 6 +-
 devapidocs/index-all.html   |39 +-
 .../hadoop/hbase/backup/package-tree.html   | 2 +-
 .../hadoop/hbase/class-use/TableName.html   | 4 +
 .../hadoop/hbase/client/package-tree.html   |26 +-
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/package-tree.html   | 8 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 6 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 4 +-
 .../hbase/master/balancer/package-tree.html | 2 +-
 .../hadoop/hbase/master/package-tree.html   | 4 +-
 .../hadoop/hbase/monitoring/package-tree.html   | 2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |16 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 6 +-
 .../hadoop/hbase/quotas/package-tree.html   | 8 +-
 .../hadoop/hbase/regionserver/package-tree.html |14 +-
 .../hbase/regionserver/wal/package-tree.html| 2 +-
 .../replication/BaseReplicationEndpoint.html| 2 +-
 .../hbase/replication/ReplicationEndpoint.html  | 2 +-
 .../ReplicationEndpoint.ReplicateContext.html   | 8 +-
 ...erClusterReplicationEndpoint.Replicator.html |   365 -
 .../HBaseInterClusterReplicationEndpoint.html   |   267 +-
 .../regionserver/HFileReplicator.html   | 4 +-
 .../RecoveredReplicationSource.html | 6 +-
 .../RegionReplicaReplicationEndpoint.html   | 2 +-
 .../ReplicationSource.LogsComparator.html   | 8 +-
 .../regionserver/ReplicationSource.html |22 +-
 ...icationSourceManager.NodeFailoverWorker.html |12 +-
 ...SourceManager.ReplicationQueueOperation.html | 4 +-
 .../regionserver/ReplicationSourceManager.html  |58 +-
 ...erClusterReplicationEndpoint.Replicator.html |   166 -
 .../regionserver/package-summary.html   |74 +-
 .../replication/regionserver/package-tree.html  | 1 -
 .../replication/regionserver/package-use.html   |63 +-
 .../hadoop/hbase/rest/model/package-tree.html   | 2 +-
 .../hbase/security/access/package-tree.html | 2 +-
 .../hadoop/hbase/security/package-tree.html | 2 +-
 .../VisibilityReplicationEndpoint.html  | 2 +-
 .../hadoop/hbase/thrift/package-tree.html   | 2 +-
 ...BaseFsck.CheckRegionConsistencyWorkItem.html |10 +-
 .../HBaseFsck.ErrorReporter.ERROR_CODE.html |   146 +-
 .../hbase/util/HBaseFsck.ErrorReporter.html |30 +-
 .../hbase/util/HBaseFsck.FileLockCallable.html  |10 +-
 .../hbase/util/HBaseFsck.HBaseFsckTool.html | 6 +-
 .../hadoop/hbase/util/HBaseFsck.HbckInfo.html   |56 +-
 .../hadoop/hbase/util/HBaseFsck.HdfsEntry.html  |14 +-
 .../hadoop/hbase/util/HBaseFsck.MetaEntry.html  |18 +-
 .../hbase/util/HBaseFsck.OnlineEntry.html   |10 +-
 .../util/HBaseFsck.PrintingErrorReporter.html   |42 +-
 .../HBaseFsck.RegionBoundariesInformation.html  |16 +-
 .../util/HBaseFsck.RegionRepairException.html   | 8 +-
 .../HBaseFsck.TableInfo.HDFSIntegrityFixer.html |22 +-
 ...aseFsck.TableInfo.IntegrityFixSuggester.html |20 +-
 .../hadoop/hbase/util/HBaseFsck.TableInfo.html  |38 +-
 .../hbase/util/HBaseFsck.WorkItemHdfsDir.html   |12 +-
 .../util/HBaseFsck.WorkItemHdfsRegionInfo.html  |12 +-
 .../util/HBaseFsck.WorkItemOverlapMerge.html|10 +-
 .../hbase/util/HBaseFsck.WorkItemRegion.html|16 +-
 .../org/apache/hadoop/hbase/util/HBaseFsck.html |   744 +-
 .../apache/hadoop/hbase/util/package-tree.html  |10 +-
 .../hadoop/hbase/wal/class-use/WAL.Entry.html   |72 +-
 

[43/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
 
b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
index 4836401..5929a03 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
@@ -126,7 +126,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class HBaseFsck.TableInfo.IntegrityFixSuggester
+private class HBaseFsck.TableInfo.IntegrityFixSuggester
 extends TableIntegrityErrorHandlerImpl
 
 
@@ -267,7 +267,7 @@ extends 
 
 errors
-HBaseFsck.ErrorReporter 
errors
+HBaseFsck.ErrorReporter 
errors
 
 
 
@@ -284,7 +284,7 @@ extends 
 
 IntegrityFixSuggester
-IntegrityFixSuggester(HBaseFsck.TableInfoti,
+IntegrityFixSuggester(HBaseFsck.TableInfoti,
   HBaseFsck.ErrorReportererrors)
 
 
@@ -302,7 +302,7 @@ extends 
 
 handleRegionStartKeyNotEmpty
-publicvoidhandleRegionStartKeyNotEmpty(HBaseFsck.HbckInfohi)
+publicvoidhandleRegionStartKeyNotEmpty(HBaseFsck.HbckInfohi)
   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
class:TableIntegrityErrorHandlerImpl
 Callback for handling case where a Table has a first region 
that does not
@@ -327,7 +327,7 @@ extends 
 
 handleRegionEndKeyNotEmpty
-publicvoidhandleRegionEndKeyNotEmpty(byte[]curEndKey)
+publicvoidhandleRegionEndKeyNotEmpty(byte[]curEndKey)
 throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
class:TableIntegrityErrorHandlerImpl
 Callback for handling case where a Table has a last region 
that does not
@@ -351,7 +351,7 @@ extends 
 
 handleDegenerateRegion
-publicvoidhandleDegenerateRegion(HBaseFsck.HbckInfohi)
+publicvoidhandleDegenerateRegion(HBaseFsck.HbckInfohi)
 throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
class:TableIntegrityErrorHandlerImpl
 Callback for handling a region that has the same start and 
end key.
@@ -373,7 +373,7 @@ extends 
 
 handleDuplicateStartKeys
-publicvoidhandleDuplicateStartKeys(HBaseFsck.HbckInfor1,
+publicvoidhandleDuplicateStartKeys(HBaseFsck.HbckInfor1,
  HBaseFsck.HbckInfor2)
   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
class:TableIntegrityErrorHandlerImpl
@@ -398,7 +398,7 @@ extends 
 
 handleSplit
-publicvoidhandleSplit(HBaseFsck.HbckInfor1,
+publicvoidhandleSplit(HBaseFsck.HbckInfor1,
 HBaseFsck.HbckInfor2)
  throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:TableIntegrityErrorHandler
@@ -419,7 +419,7 @@ extends 
 
 handleOverlapInRegionChain
-publicvoidhandleOverlapInRegionChain(HBaseFsck.HbckInfohi1,
+publicvoidhandleOverlapInRegionChain(HBaseFsck.HbckInfohi1,
HBaseFsck.HbckInfohi2)
 throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
class:TableIntegrityErrorHandlerImpl
@@ -446,7 +446,7 @@ extends 
 
 handleHoleInRegionChain
-publicvoidhandleHoleInRegionChain(byte[]holeStart,
+publicvoidhandleHoleInRegionChain(byte[]holeStart,
 byte[]holeStop)
  throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
class:TableIntegrityErrorHandlerImpl

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html 
b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
index 67a9310..52a06c6 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
@@ -113,7 +113,7 @@ var activeTableTab = 

[09/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.html
 
b/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.html
index 386285f..ae1e807 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.FailureInjectingReplicationEndpointForTest.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -74,14 +74,14 @@ var activeTableTab = "activeTableTab";
 
 
 Summary:
-Nested|
-Field|
+Nested|
+Field|
 Constr|
 Method
 
 
 Detail:
-Field|
+Field|
 Constr|
 Method
 
@@ -142,7 +142,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class TestReplicator.FailureInjectingReplicationEndpointForTest
+public static class TestReplicator.FailureInjectingReplicationEndpointForTest
 extends TestReplicator.ReplicationEndpointForTest
 
 
@@ -156,35 +156,6 @@ extends 
-Nested Classes
-
-Modifier and Type
-Class and Description
-
-
-(package private) static class
-TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
-
-
-class
-TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingReplicatorForTest
-
-
-
-
-
-
-Nested classes/interfaces inherited from 
classorg.apache.hadoop.hbase.replication.regionserver.TestReplicator.ReplicationEndpointForTest
-TestReplicator.ReplicationEndpointForTest.ReplicatorForTest
-
-
-
-
-
-Nested classes/interfaces inherited from 
classorg.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
-org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint.Replicator
-
 
 
 
@@ -214,6 +185,24 @@ extends 
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicBoolean
+failNext
+
+
+
+
+
+
+Fields inherited from 
classorg.apache.hadoop.hbase.replication.regionserver.TestReplicator.ReplicationEndpointForTest
+batchCount,
 entriesCount
+
 
 
 
@@ -253,7 +242,7 @@ extends Method and Description
 
 
-protected 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint.Replicator
+protected https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true;
 title="class or interface in java.util.concurrent">Callablehttps://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
 createReplicator(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.wal.WAL.Entryentries,
 intordinal)
 
@@ -270,7 +259,7 @@ extends 
 
@@ -315,6 +304,23 @@ extends 
 
 
+
+
+
+
+
+Field Detail
+
+
+
+
+
+failNext
+private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicBoolean failNext
+
+
+
+
 
 
 
@@ -327,7 +333,7 @@ extends 
 
 FailureInjectingReplicationEndpointForTest
-publicFailureInjectingReplicationEndpointForTest()
+publicFailureInjectingReplicationEndpointForTest()
 
 
 
@@ -344,8 +350,8 @@ extends 
 
 createReplicator
-protectedorg.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint.ReplicatorcreateReplicator(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.wal.WAL.Entryentries,
-   
 intordinal)
+protectedhttps://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true;
 title="class or interface in java.util.concurrent">Callablehttps://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">IntegercreateReplicator(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.wal.WAL.Entryentries,
+ intordinal)
 
 Overrides:
 createReplicatorin
 classTestReplicator.ReplicationEndpointForTest
@@ -381,7 +387,7 @@ extends 
 
 PrevClass
-NextClass
+NextClass
 
 
 

[07/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-frame.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-frame.html
 
b/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-frame.html
index 7303f51..c604218 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-frame.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-frame.html
@@ -32,9 +32,10 @@
 TestReplicationThrottler
 TestReplicator
 TestReplicator.FailureInjectingReplicationEndpointForTest
-TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
 TestReplicator.ReplicationEndpointForTest
 TestSerialReplicationChecker
+TestSerialReplicationEndpoint
+TestSerialReplicationEndpoint.TestEndpoint
 TestSourceFSConfigurationProvider
 TestWALEntrySinkFilter
 TestWALEntrySinkFilter.DevNullConnection

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-summary.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-summary.html
 
b/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-summary.html
index 9b27b42..fd9dcc8 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-summary.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-summary.html
@@ -173,40 +173,44 @@
 
 
 
-TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
+TestReplicator.ReplicationEndpointForTest
 
 
-
-TestReplicator.ReplicationEndpointForTest
+
+TestSerialReplicationChecker
 
 
 
-TestSerialReplicationChecker
+TestSerialReplicationEndpoint
 
 
 
-TestSourceFSConfigurationProvider
+TestSerialReplicationEndpoint.TestEndpoint
 
 
 
+TestSourceFSConfigurationProvider
+
+
+
 TestWALEntrySinkFilter
 
 Simple test of sink-side wal entry filter facility.
 
 
-
+
 TestWALEntrySinkFilter.DevNullConnection
 
 A DevNull Connection whose only purpose is checking what 
edits made it through.
 
 
-
+
 TestWALEntrySinkFilter.IfTimeIsGreaterThanBOUNDARYWALEntrySinkFilterImpl
 
 Simple filter that will filter out any entry wholse 
writeTime is <= 5.
 
 
-
+
 TestWALEntryStream
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
 
b/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
index f7c6455..b47d481 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
@@ -94,6 +94,7 @@
 org.apache.hadoop.hbase.replication.regionserver.TestReplicator.FailureInjectingReplicationEndpointForTest
 
 
+org.apache.hadoop.hbase.replication.regionserver.TestSerialReplicationEndpoint.TestEndpoint
 
 
 
@@ -138,13 +139,8 @@
 
 org.apache.hadoop.hbase.replication.regionserver.TestReplicationSourceManager.DummyServer 
(implements org.apache.hadoop.hbase.Server)
 org.apache.hadoop.hbase.replication.regionserver.TestReplicationThrottler
-org.apache.hadoop.hbase.replication.regionserver.TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
 (implements 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface)
-org.apache.hadoop.hbase.replication.regionserver.TestReplicator.ReplicationEndpointForTest.ReplicatorForTest
-
-org.apache.hadoop.hbase.replication.regionserver.TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingReplicatorForTest
-
-
 org.apache.hadoop.hbase.replication.regionserver.TestSerialReplicationChecker
+org.apache.hadoop.hbase.replication.regionserver.TestSerialReplicationEndpoint
 org.apache.hadoop.hbase.replication.regionserver.TestSourceFSConfigurationProvider (implements 
org.apache.hadoop.hbase.replication.regionserver.SourceFSConfigurationProvider)
 org.apache.hadoop.hbase.replication.regionserver.TestWALEntrySinkFilter
 org.apache.hadoop.hbase.replication.regionserver.TestWALEntrySinkFilter.DevNullConnection 
(implements org.apache.hadoop.hbase.client.Connection)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-use.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/package-use.html
 

[49/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 151c494..e6f25dd 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2018 The Apache Software Foundation
 
-  File: 3607,
- Errors: 15867,
+  File: 3609,
+ Errors: 15864,
  Warnings: 0,
  Infos: 0
   
@@ -8250,6 +8250,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.TestHBaseFsckCleanReplicationBarriers.java;>org/apache/hadoop/hbase/util/TestHBaseFsckCleanReplicationBarriers.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.handler.WALSplitterHandler.java;>org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java
 
 
@@ -9631,7 +9645,7 @@ under the License.
   0
 
 
-  5
+  4
 
   
   
@@ -23160,6 +23174,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.replication.regionserver.TestSerialReplicationEndpoint.java;>org/apache/hadoop/hbase/replication/regionserver/TestSerialReplicationEndpoint.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.net.TestAddress.java;>org/apache/hadoop/hbase/net/TestAddress.java
 
 
@@ -43035,7 +43063,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -44743,7 +44771,7 @@ under the License.
   0
 
 
-  4
+  3
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/coc.html
--
diff --git a/coc.html b/coc.html
index 741545b..d4ae2af 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Code of Conduct Policy
@@ -375,7 +375,7 @@ email to mailto:priv...@hbase.apache.org;>the priv
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-05-03
+  Last Published: 
2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index 2d6b9c2..78ca6ef 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependencies
 
@@ -440,7 +440,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-05-03
+  Last Published: 
2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 86fe4cf..05ba8a3 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Reactor Dependency Convergence
 
@@ -1105,7 +1105,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-05-03
+  Last Published: 
2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html

[15/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.html
index e1bc325..63e7421 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 
org.apache.hadoop.security.UserGroupInformation;
-136import 
org.apache.hadoop.util.ReflectionUtils;
-137import 

[37/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
index c282308..094b592 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.NodeFailoverWorker.html
@@ -426,483 +426,481 @@
 418   */
 419  void 
removeRecoveredSource(ReplicationSourceInterface src) {
 420LOG.info("Done with the recovered 
queue " + src.getQueueId());
-421src.getSourceMetrics().clear();
-422this.oldsources.remove(src);
-423// Delete queue from storage and 
memory
-424deleteQueue(src.getQueueId());
-425
this.walsByIdRecoveredQueues.remove(src.getQueueId());
-426  }
-427
-428  /**
-429   * Clear the metrics and related 
replication queue of the specified old source
-430   * @param src source to clear
-431   */
-432  void 
removeSource(ReplicationSourceInterface src) {
-433LOG.info("Done with the queue " + 
src.getQueueId());
-434src.getSourceMetrics().clear();
-435
this.sources.remove(src.getPeerId());
-436// Delete queue from storage and 
memory
-437deleteQueue(src.getQueueId());
-438
this.walsById.remove(src.getQueueId());
-439  }
-440
-441  /**
-442   * Delete a complete queue of wals 
associated with a replication source
-443   * @param queueId the id of replication 
queue to delete
-444   */
-445  private void deleteQueue(String 
queueId) {
-446abortWhenFail(() - 
this.queueStorage.removeQueue(server.getServerName(), queueId));
-447  }
-448
-449  @FunctionalInterface
-450  private interface 
ReplicationQueueOperation {
-451void exec() throws 
ReplicationException;
-452  }
-453
-454  private void 
abortWhenFail(ReplicationQueueOperation op) {
-455try {
-456  op.exec();
-457} catch (ReplicationException e) {
-458  server.abort("Failed to operate on 
replication queue", e);
-459}
-460  }
-461
-462  private void 
throwIOExceptionWhenFail(ReplicationQueueOperation op) throws IOException {
-463try {
-464  op.exec();
-465} catch (ReplicationException e) {
-466  throw new IOException(e);
-467}
-468  }
-469
-470  private void 
abortAndThrowIOExceptionWhenFail(ReplicationQueueOperation op) throws 
IOException {
-471try {
-472  op.exec();
-473} catch (ReplicationException e) {
-474  server.abort("Failed to operate on 
replication queue", e);
-475  throw new IOException(e);
-476}
-477  }
-478
-479  /**
-480   * This method will log the current 
position to storage. And also clean old logs from the
-481   * replication queue.
-482   * @param queueId id of the replication 
queue
-483   * @param queueRecovered indicates if 
this queue comes from another region server
-484   * @param entryBatch the wal entry 
batch we just shipped
-485   */
-486  public void 
logPositionAndCleanOldLogs(String queueId, boolean queueRecovered,
-487  WALEntryBatch entryBatch) {
-488String fileName = 
entryBatch.getLastWalPath().getName();
-489abortWhenFail(() - 
this.queueStorage.setWALPosition(server.getServerName(), queueId, fileName,
-490  entryBatch.getLastWalPosition(), 
entryBatch.getLastSeqIds()));
-491cleanOldLogs(fileName, 
entryBatch.isEndOfFile(), queueId, queueRecovered);
-492  }
-493
-494  /**
-495   * Cleans a log file and all older logs 
from replication queue. Called when we are sure that a log
-496   * file is closed and has no more 
entries.
-497   * @param log Path to the log
-498   * @param inclusive whether we should 
also remove the given log file
-499   * @param queueId id of the replication 
queue
-500   * @param queueRecovered Whether this 
is a recovered queue
-501   */
-502  @VisibleForTesting
-503  void cleanOldLogs(String log, boolean 
inclusive, String queueId, boolean queueRecovered) {
-504String logPrefix = 
AbstractFSWALProvider.getWALPrefixFromWALName(log);
-505if (queueRecovered) {
-506  NavigableSetString wals = 
walsByIdRecoveredQueues.get(queueId).get(logPrefix);
-507  if (wals != null) {
-508cleanOldLogs(wals, log, 
inclusive, queueId);
-509  }
-510} else {
-511  // synchronized on walsById to 
avoid race with preLogRoll
-512  synchronized (this.walsById) {
-513NavigableSetString wals = 
walsById.get(queueId).get(logPrefix);
-514if (wals != null) {
-515  cleanOldLogs(wals, log, 
inclusive, queueId);
-516}
-517  }
-518}
-519  }
-520
-521  private void 
cleanOldLogs(NavigableSetString wals, 

[14/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/downloads.html
--
diff --git a/downloads.html b/downloads.html
index 099207f..7fad404 100644
--- a/downloads.html
+++ b/downloads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Apache HBase Downloads
 
@@ -366,7 +366,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-05-03
+  Last Published: 
2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/export_control.html
--
diff --git a/export_control.html b/export_control.html
index 762a633..e0a5a62 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Export Control
@@ -331,7 +331,7 @@ for more details.
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-05-03
+  Last Published: 
2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/index.html
--
diff --git a/index.html b/index.html
index a2cff89..9fcef97 100644
--- a/index.html
+++ b/index.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Apache HBase™ Home
 
@@ -409,7 +409,7 @@ Apache HBase is an open-source, distributed, versioned, 
non-relational database
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-05-03
+  Last Published: 
2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/integration.html
--
diff --git a/integration.html b/integration.html
index 595dcba..f53166f 100644
--- a/integration.html
+++ b/integration.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  CI Management
 
@@ -291,7 +291,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-05-03
+  Last Published: 
2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/issue-tracking.html
--
diff --git a/issue-tracking.html b/issue-tracking.html
index 13aacbb..cf58b27 100644
--- a/issue-tracking.html
+++ b/issue-tracking.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Issue Management
 
@@ -288,7 +288,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-05-03
+  Last Published: 
2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/license.html
--
diff --git a/license.html b/license.html
index 2f9f8fa..435b1e2 100644
--- a/license.html
+++ b/license.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Licenses
 
@@ -491,7 +491,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-05-03
+  Last Published: 
2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/mail-lists.html
--
diff --git a/mail-lists.html b/mail-lists.html
index f41f35f..1133e92 100644
--- a/mail-lists.html
+++ b/mail-lists.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Mailing Lists
 
@@ -341,7 +341,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-05-03
+  Last Published: 
2018-05-04
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/metrics.html
--
diff --git a/metrics.html b/metrics.html
index fa63090..75f8027 100644
--- a/metrics.html
+++ b/metrics.html

[05/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilter.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilter.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilter.html
index 7a938de..43a87b6 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilter.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilter.html
@@ -33,539 +33,515 @@
 025import java.util.ArrayList;
 026import java.util.List;
 027import java.util.UUID;
-028import 
java.util.concurrent.atomic.AtomicBoolean;
-029import 
java.util.concurrent.atomic.AtomicInteger;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import org.apache.hadoop.hbase.Cell;
-032import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-033import org.apache.hadoop.hbase.Waiter;
-034import 
org.apache.hadoop.hbase.client.Connection;
-035import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-036import 
org.apache.hadoop.hbase.client.Put;
-037import 
org.apache.hadoop.hbase.client.RegionInfo;
-038import 
org.apache.hadoop.hbase.client.Table;
-039import 
org.apache.hadoop.hbase.regionserver.HRegion;
-040import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
-041import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
-042import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
-043import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;
-044import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl;
-045import 
org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
-046import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-047import 
org.apache.hadoop.hbase.testclassification.ReplicationTests;
-048import 
org.apache.hadoop.hbase.util.Bytes;
-049import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-050import 
org.apache.hadoop.hbase.util.Threads;
-051import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-052import 
org.apache.hadoop.hbase.zookeeper.ZKConfig;
-053import 
org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
-054import org.junit.AfterClass;
-055import org.junit.Assert;
-056import org.junit.Before;
-057import org.junit.BeforeClass;
-058import org.junit.ClassRule;
-059import org.junit.Test;
-060import 
org.junit.experimental.categories.Category;
-061import org.slf4j.Logger;
-062import org.slf4j.LoggerFactory;
-063
-064/**
-065 * Tests ReplicationSource and 
ReplicationEndpoint interactions
-066 */
-067@Category({ ReplicationTests.class, 
MediumTests.class })
-068public class TestReplicationEndpoint 
extends TestReplicationBase {
-069
-070  @ClassRule
-071  public static final HBaseClassTestRule 
CLASS_RULE =
-072  
HBaseClassTestRule.forClass(TestReplicationEndpoint.class);
-073
-074  private static final Logger LOG = 
LoggerFactory.getLogger(TestReplicationEndpoint.class);
-075
-076  static int numRegionServers;
-077
-078  @BeforeClass
-079  public static void setUpBeforeClass() 
throws Exception {
-080
TestReplicationBase.setUpBeforeClass();
-081numRegionServers = 
utility1.getHBaseCluster().getRegionServerThreads().size();
-082  }
-083
-084  @AfterClass
-085  public static void tearDownAfterClass() 
throws Exception {
-086
TestReplicationBase.tearDownAfterClass();
-087// check stop is called
-088
Assert.assertTrue(ReplicationEndpointForTest.stoppedCount.get()  0);
-089  }
-090
-091  @Before
-092  public void setup() throws Exception 
{
-093
ReplicationEndpointForTest.contructedCount.set(0);
-094
ReplicationEndpointForTest.startedCount.set(0);
-095
ReplicationEndpointForTest.replicateCount.set(0);
-096
ReplicationEndpointReturningFalse.replicated.set(false);
-097
ReplicationEndpointForTest.lastEntries = null;
-098final ListRegionServerThread 
rsThreads =
-099
utility1.getMiniHBaseCluster().getRegionServerThreads();
-100for (RegionServerThread rs : 
rsThreads) {
-101  
utility1.getAdmin().rollWALWriter(rs.getRegionServer().getServerName());
-102}
-103// Wait for  all log roll to finish
-104utility1.waitFor(3000, new 
Waiter.ExplainingPredicateException() {
-105  @Override
-106  public boolean evaluate() throws 
Exception {
-107for (RegionServerThread rs : 
rsThreads) {
-108  if 
(!rs.getRegionServer().walRollRequestFinished()) {
-109return false;
-110  }
-111}
-112return true;
-113  }
-114
-115  @Override
-116  public String 

[45/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index 67b7e3a..dfa02b5 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -191,9 +191,9 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
+org.apache.hadoop.hbase.security.SaslStatus
 org.apache.hadoop.hbase.security.AuthMethod
 org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection
-org.apache.hadoop.hbase.security.SaslStatus
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.html
 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.html
index d0f87bf..29b4f34 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.html
@@ -379,7 +379,7 @@ implements Parameters:
 context - replication context
 Throws:
-https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException - error occur 
when initialize the endpoint.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
index 272b207..8e41b8e 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
@@ -198,8 +198,8 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.thrift.ThriftMetrics.ThriftServerType
 org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType
+org.apache.hadoop.hbase.thrift.ThriftMetrics.ThriftServerType
 org.apache.hadoop.hbase.thrift.MetricsThriftServerSourceFactoryImpl.FactoryStorage
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
 
b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
index 1adeadd..5fb98ee 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-class HBaseFsck.CheckRegionConsistencyWorkItem
+class HBaseFsck.CheckRegionConsistencyWorkItem
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true;
 title="class or interface in java.util.concurrent">Callablehttps://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
 
@@ -211,7 +211,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 
 
 key
-private finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String key
+private 

[34/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 

[28/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
index e1bc325..63e7421 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 
org.apache.hadoop.security.UserGroupInformation;
-136import 

[25/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 

[13/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/index-all.html
--
diff --git a/testdevapidocs/index-all.html b/testdevapidocs/index-all.html
index 8c4060d..b1e54cc 100644
--- a/testdevapidocs/index-all.html
+++ b/testdevapidocs/index-all.html
@@ -835,6 +835,8 @@
 
 addStateAndBarrier(RegionInfo,
 RegionState.State, long...) - Method in class 
org.apache.hadoop.hbase.replication.regionserver.TestSerialReplicationChecker
 
+addStateAndBarrier(RegionInfo,
 RegionState.State, long...) - Method in class 
org.apache.hadoop.hbase.util.TestHBaseFsckCleanReplicationBarriers
+
 addStoreFile()
 - Method in class org.apache.hadoop.hbase.regionserver.TestHStore
 
 addThread(MultithreadedTestUtil.TestThread)
 - Method in class org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext
@@ -3134,10 +3136,6 @@
 
 call()
 - Method in class org.apache.hadoop.hbase.procedure2.store.wal.ProcedureWALPerformanceEvaluation.Worker
 
-call()
 - Method in class org.apache.hadoop.hbase.replication.TestReplicationEndpoint.InterClusterReplicationEndpointForTest.DummyReplicator
-
-call()
 - Method in class org.apache.hadoop.hbase.replication.TestReplicationEndpoint.InterClusterReplicationEndpointForTest.FailingDummyReplicator
-
 call()
 - Method in class org.apache.hadoop.hbase.TestCompatibilitySingletonFactory.TestCompatibilitySingletonFactoryCallable
 
 call()
 - Method in class org.apache.hadoop.hbase.util.TestIdLock.IdLockTestThread
@@ -3204,6 +3202,8 @@
 
 canonicalizeMetricName(String)
 - Method in class org.apache.hadoop.hbase.test.MetricsAssertHelperImpl
 
+canReplicateToSameCluster()
 - Method in class org.apache.hadoop.hbase.replication.regionserver.TestSerialReplicationEndpoint.TestEndpoint
+
 canRollback
 - Variable in class org.apache.hadoop.hbase.client.TestGetProcedureResult.DummyProcedure
 
 CAPACITY
 - Static variable in class org.apache.hadoop.hbase.util.TestBoundedPriorityBlockingQueue
@@ -5743,6 +5743,8 @@
 
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.replication.regionserver.TestSerialReplicationChecker
 
+CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.replication.regionserver.TestSerialReplicationEndpoint
+
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.replication.regionserver.TestWALEntrySinkFilter
 
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.replication.regionserver.TestWALEntryStream
@@ -6251,6 +6253,8 @@
 
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.util.TestFSVisitor
 
+CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.util.TestHBaseFsckCleanReplicationBarriers
+
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.util.TestHBaseFsckComparator
 
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.util.TestHBaseFsckEncryption
@@ -6449,6 +6453,8 @@
 
 cleanRegionRootDir(FileSystem,
 Path) - Method in class org.apache.hadoop.hbase.wal.WALPerformanceEvaluation
 
+cleanReplicationBarrier(Configuration,
 TableName) - Static method in class 
org.apache.hadoop.hbase.util.hbck.HbckTestingUtil
+
 cleanup()
 - Method in class org.apache.hadoop.hbase.backup.TestBackupHFileCleaner
 
 cleanup()
 - Method in class org.apache.hadoop.hbase.client.TestAsyncSnapshotAdminApi
@@ -6643,8 +6649,6 @@
 
 clearCompactionQueues(RpcController,
 AdminProtos.ClearCompactionQueuesRequest) - Method in class 
org.apache.hadoop.hbase.master.MockRegionServer
 
-clearCompactionQueues(RpcController,
 AdminProtos.ClearCompactionQueuesRequest) - Method in class 
org.apache.hadoop.hbase.replication.regionserver.TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
-
 clearMapping()
 - Static method in class org.apache.hadoop.hbase.http.TestHttpServer.MyGroupsProvider
 
 clearOutput(Path)
 - Static method in class org.apache.hadoop.hbase.coprocessor.TestSecureExport
@@ -6657,8 +6661,6 @@
 
 clearRegionBlockCache(HRegionServer)
 - Method in class org.apache.hadoop.hbase.regionserver.TestClearRegionBlockCache
 
-clearRegionBlockCache(RpcController,
 AdminProtos.ClearRegionBlockCacheRequest) - Method in class 
org.apache.hadoop.hbase.replication.regionserver.TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
-
 clearSnapshots()
 - Method in class org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshotNotifierForTest
 
 clearTable()
 - Method in class org.apache.hadoop.hbase.coprocessor.TestPassCustomCellViaRegionObserver
@@ -6864,8 +6866,6 @@
 
 closeRegion(HBaseTestingUtility,
 HRegionServer, HRegionInfo) - Static method in class 
org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster
 
-closeRegion(RpcController,
 AdminProtos.CloseRegionRequest) - Method in class 
org.apache.hadoop.hbase.replication.regionserver.TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
-
 closeRegion(HRegion)
 - Method in class 

[08/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestSerialReplicationEndpoint.TestEndpoint.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestSerialReplicationEndpoint.TestEndpoint.html
 
b/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestSerialReplicationEndpoint.TestEndpoint.html
new file mode 100644
index 000..e1c873a
--- /dev/null
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/replication/regionserver/TestSerialReplicationEndpoint.TestEndpoint.html
@@ -0,0 +1,475 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+TestSerialReplicationEndpoint.TestEndpoint (Apache HBase 3.0.0-SNAPSHOT 
Test API)
+
+
+
+
+
+var methods = {"i0":10,"i1":10,"i2":9,"i3":10,"i4":9};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+PrevClass
+NextClass
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.replication.regionserver
+Class TestSerialReplicationEndpoint.TestEndpoint
+
+
+
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hbase.thirdparty.com.google.common.util.concurrent.AbstractService
+
+
+org.apache.hadoop.hbase.replication.BaseReplicationEndpoint
+
+
+org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint
+
+
+org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
+
+
+org.apache.hadoop.hbase.replication.regionserver.TestSerialReplicationEndpoint.TestEndpoint
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+All Implemented Interfaces:
+org.apache.hadoop.hbase.Abortable, 
org.apache.hadoop.hbase.replication.ReplicationEndpoint, 
org.apache.hadoop.hbase.replication.ReplicationPeerConfigListener, 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.Service
+
+
+Enclosing class:
+TestSerialReplicationEndpoint
+
+
+
+public static class TestSerialReplicationEndpoint.TestEndpoint
+extends 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+
+
+
+Nested classes/interfaces inherited from 
classorg.apache.hadoop.hbase.replication.HBaseReplicationEndpoint
+org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint.PeerRegionServerListener
+
+
+
+
+
+Nested classes/interfaces inherited from 
interfaceorg.apache.hadoop.hbase.replication.ReplicationEndpoint
+org.apache.hadoop.hbase.replication.ReplicationEndpoint.Context, 
org.apache.hadoop.hbase.replication.ReplicationEndpoint.ReplicateContext
+
+
+
+
+
+Nested classes/interfaces inherited from 
interfaceorg.apache.hbase.thirdparty.com.google.common.util.concurrent.Service
+org.apache.hbase.thirdparty.com.google.common.util.concurrent.Service.Listener,
 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.Service.State
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private static https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in 
java.util.concurrent">BlockingQueueorg.apache.hadoop.hbase.wal.WAL.Entry
+entryQueue
+
+
+
+
+
+
+Fields inherited from 
classorg.apache.hadoop.hbase.replication.BaseReplicationEndpoint
+ctx, REPLICATION_WALENTRYFILTER_CONFIG_KEY
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+TestEndpoint()
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsStatic MethodsInstance MethodsConcrete Methods
+
+Modifier and Type
+Method and Description
+
+
+boolean
+canReplicateToSameCluster()
+
+
+protected https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true;
 title="class or interface in java.util.concurrent">Callablehttps://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer

[04/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilterSubclass.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilterSubclass.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilterSubclass.html
index 7a938de..43a87b6 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilterSubclass.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.EverythingPassesWALEntryFilterSubclass.html
@@ -33,539 +33,515 @@
 025import java.util.ArrayList;
 026import java.util.List;
 027import java.util.UUID;
-028import 
java.util.concurrent.atomic.AtomicBoolean;
-029import 
java.util.concurrent.atomic.AtomicInteger;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import org.apache.hadoop.hbase.Cell;
-032import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-033import org.apache.hadoop.hbase.Waiter;
-034import 
org.apache.hadoop.hbase.client.Connection;
-035import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-036import 
org.apache.hadoop.hbase.client.Put;
-037import 
org.apache.hadoop.hbase.client.RegionInfo;
-038import 
org.apache.hadoop.hbase.client.Table;
-039import 
org.apache.hadoop.hbase.regionserver.HRegion;
-040import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
-041import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
-042import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
-043import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;
-044import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl;
-045import 
org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
-046import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-047import 
org.apache.hadoop.hbase.testclassification.ReplicationTests;
-048import 
org.apache.hadoop.hbase.util.Bytes;
-049import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-050import 
org.apache.hadoop.hbase.util.Threads;
-051import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-052import 
org.apache.hadoop.hbase.zookeeper.ZKConfig;
-053import 
org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
-054import org.junit.AfterClass;
-055import org.junit.Assert;
-056import org.junit.Before;
-057import org.junit.BeforeClass;
-058import org.junit.ClassRule;
-059import org.junit.Test;
-060import 
org.junit.experimental.categories.Category;
-061import org.slf4j.Logger;
-062import org.slf4j.LoggerFactory;
-063
-064/**
-065 * Tests ReplicationSource and 
ReplicationEndpoint interactions
-066 */
-067@Category({ ReplicationTests.class, 
MediumTests.class })
-068public class TestReplicationEndpoint 
extends TestReplicationBase {
-069
-070  @ClassRule
-071  public static final HBaseClassTestRule 
CLASS_RULE =
-072  
HBaseClassTestRule.forClass(TestReplicationEndpoint.class);
-073
-074  private static final Logger LOG = 
LoggerFactory.getLogger(TestReplicationEndpoint.class);
-075
-076  static int numRegionServers;
-077
-078  @BeforeClass
-079  public static void setUpBeforeClass() 
throws Exception {
-080
TestReplicationBase.setUpBeforeClass();
-081numRegionServers = 
utility1.getHBaseCluster().getRegionServerThreads().size();
-082  }
-083
-084  @AfterClass
-085  public static void tearDownAfterClass() 
throws Exception {
-086
TestReplicationBase.tearDownAfterClass();
-087// check stop is called
-088
Assert.assertTrue(ReplicationEndpointForTest.stoppedCount.get()  0);
-089  }
-090
-091  @Before
-092  public void setup() throws Exception 
{
-093
ReplicationEndpointForTest.contructedCount.set(0);
-094
ReplicationEndpointForTest.startedCount.set(0);
-095
ReplicationEndpointForTest.replicateCount.set(0);
-096
ReplicationEndpointReturningFalse.replicated.set(false);
-097
ReplicationEndpointForTest.lastEntries = null;
-098final ListRegionServerThread 
rsThreads =
-099
utility1.getMiniHBaseCluster().getRegionServerThreads();
-100for (RegionServerThread rs : 
rsThreads) {
-101  
utility1.getAdmin().rollWALWriter(rs.getRegionServer().getServerName());
-102}
-103// Wait for  all log roll to finish
-104utility1.waitFor(3000, new 
Waiter.ExplainingPredicateException() {
-105  @Override
-106  public boolean evaluate() throws 
Exception {
-107for (RegionServerThread rs : 
rsThreads) {
-108  if 
(!rs.getRegionServer().walRollRequestFinished()) {
-109return false;
-110  }
-111}
-112return true;
-113  }
-114
-115  

[26/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 
org.apache.hadoop.security.UserGroupInformation;

[18/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 

[36/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
index c282308..094b592 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.ReplicationQueueOperation.html
@@ -426,483 +426,481 @@
 418   */
 419  void 
removeRecoveredSource(ReplicationSourceInterface src) {
 420LOG.info("Done with the recovered 
queue " + src.getQueueId());
-421src.getSourceMetrics().clear();
-422this.oldsources.remove(src);
-423// Delete queue from storage and 
memory
-424deleteQueue(src.getQueueId());
-425
this.walsByIdRecoveredQueues.remove(src.getQueueId());
-426  }
-427
-428  /**
-429   * Clear the metrics and related 
replication queue of the specified old source
-430   * @param src source to clear
-431   */
-432  void 
removeSource(ReplicationSourceInterface src) {
-433LOG.info("Done with the queue " + 
src.getQueueId());
-434src.getSourceMetrics().clear();
-435
this.sources.remove(src.getPeerId());
-436// Delete queue from storage and 
memory
-437deleteQueue(src.getQueueId());
-438
this.walsById.remove(src.getQueueId());
-439  }
-440
-441  /**
-442   * Delete a complete queue of wals 
associated with a replication source
-443   * @param queueId the id of replication 
queue to delete
-444   */
-445  private void deleteQueue(String 
queueId) {
-446abortWhenFail(() - 
this.queueStorage.removeQueue(server.getServerName(), queueId));
-447  }
-448
-449  @FunctionalInterface
-450  private interface 
ReplicationQueueOperation {
-451void exec() throws 
ReplicationException;
-452  }
-453
-454  private void 
abortWhenFail(ReplicationQueueOperation op) {
-455try {
-456  op.exec();
-457} catch (ReplicationException e) {
-458  server.abort("Failed to operate on 
replication queue", e);
-459}
-460  }
-461
-462  private void 
throwIOExceptionWhenFail(ReplicationQueueOperation op) throws IOException {
-463try {
-464  op.exec();
-465} catch (ReplicationException e) {
-466  throw new IOException(e);
-467}
-468  }
-469
-470  private void 
abortAndThrowIOExceptionWhenFail(ReplicationQueueOperation op) throws 
IOException {
-471try {
-472  op.exec();
-473} catch (ReplicationException e) {
-474  server.abort("Failed to operate on 
replication queue", e);
-475  throw new IOException(e);
-476}
-477  }
-478
-479  /**
-480   * This method will log the current 
position to storage. And also clean old logs from the
-481   * replication queue.
-482   * @param queueId id of the replication 
queue
-483   * @param queueRecovered indicates if 
this queue comes from another region server
-484   * @param entryBatch the wal entry 
batch we just shipped
-485   */
-486  public void 
logPositionAndCleanOldLogs(String queueId, boolean queueRecovered,
-487  WALEntryBatch entryBatch) {
-488String fileName = 
entryBatch.getLastWalPath().getName();
-489abortWhenFail(() - 
this.queueStorage.setWALPosition(server.getServerName(), queueId, fileName,
-490  entryBatch.getLastWalPosition(), 
entryBatch.getLastSeqIds()));
-491cleanOldLogs(fileName, 
entryBatch.isEndOfFile(), queueId, queueRecovered);
-492  }
-493
-494  /**
-495   * Cleans a log file and all older logs 
from replication queue. Called when we are sure that a log
-496   * file is closed and has no more 
entries.
-497   * @param log Path to the log
-498   * @param inclusive whether we should 
also remove the given log file
-499   * @param queueId id of the replication 
queue
-500   * @param queueRecovered Whether this 
is a recovered queue
-501   */
-502  @VisibleForTesting
-503  void cleanOldLogs(String log, boolean 
inclusive, String queueId, boolean queueRecovered) {
-504String logPrefix = 
AbstractFSWALProvider.getWALPrefixFromWALName(log);
-505if (queueRecovered) {
-506  NavigableSetString wals = 
walsByIdRecoveredQueues.get(queueId).get(logPrefix);
-507  if (wals != null) {
-508cleanOldLogs(wals, log, 
inclusive, queueId);
-509  }
-510} else {
-511  // synchronized on walsById to 
avoid race with preLogRoll
-512  synchronized (this.walsById) {
-513NavigableSetString wals = 
walsById.get(queueId).get(logPrefix);
-514if (wals != null) {
-515  cleanOldLogs(wals, log, 
inclusive, queueId);
-516}
-517  }
-518}
-519  }
-520
-521  private void 

[01/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site a1e6bc211 -> de18d4687


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
index 7a938de..43a87b6 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.html
@@ -33,539 +33,515 @@
 025import java.util.ArrayList;
 026import java.util.List;
 027import java.util.UUID;
-028import 
java.util.concurrent.atomic.AtomicBoolean;
-029import 
java.util.concurrent.atomic.AtomicInteger;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import org.apache.hadoop.hbase.Cell;
-032import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-033import org.apache.hadoop.hbase.Waiter;
-034import 
org.apache.hadoop.hbase.client.Connection;
-035import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-036import 
org.apache.hadoop.hbase.client.Put;
-037import 
org.apache.hadoop.hbase.client.RegionInfo;
-038import 
org.apache.hadoop.hbase.client.Table;
-039import 
org.apache.hadoop.hbase.regionserver.HRegion;
-040import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
-041import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
-042import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
-043import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;
-044import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl;
-045import 
org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
-046import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-047import 
org.apache.hadoop.hbase.testclassification.ReplicationTests;
-048import 
org.apache.hadoop.hbase.util.Bytes;
-049import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-050import 
org.apache.hadoop.hbase.util.Threads;
-051import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-052import 
org.apache.hadoop.hbase.zookeeper.ZKConfig;
-053import 
org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
-054import org.junit.AfterClass;
-055import org.junit.Assert;
-056import org.junit.Before;
-057import org.junit.BeforeClass;
-058import org.junit.ClassRule;
-059import org.junit.Test;
-060import 
org.junit.experimental.categories.Category;
-061import org.slf4j.Logger;
-062import org.slf4j.LoggerFactory;
-063
-064/**
-065 * Tests ReplicationSource and 
ReplicationEndpoint interactions
-066 */
-067@Category({ ReplicationTests.class, 
MediumTests.class })
-068public class TestReplicationEndpoint 
extends TestReplicationBase {
-069
-070  @ClassRule
-071  public static final HBaseClassTestRule 
CLASS_RULE =
-072  
HBaseClassTestRule.forClass(TestReplicationEndpoint.class);
-073
-074  private static final Logger LOG = 
LoggerFactory.getLogger(TestReplicationEndpoint.class);
-075
-076  static int numRegionServers;
-077
-078  @BeforeClass
-079  public static void setUpBeforeClass() 
throws Exception {
-080
TestReplicationBase.setUpBeforeClass();
-081numRegionServers = 
utility1.getHBaseCluster().getRegionServerThreads().size();
-082  }
-083
-084  @AfterClass
-085  public static void tearDownAfterClass() 
throws Exception {
-086
TestReplicationBase.tearDownAfterClass();
-087// check stop is called
-088
Assert.assertTrue(ReplicationEndpointForTest.stoppedCount.get()  0);
-089  }
-090
-091  @Before
-092  public void setup() throws Exception 
{
-093
ReplicationEndpointForTest.contructedCount.set(0);
-094
ReplicationEndpointForTest.startedCount.set(0);
-095
ReplicationEndpointForTest.replicateCount.set(0);
-096
ReplicationEndpointReturningFalse.replicated.set(false);
-097
ReplicationEndpointForTest.lastEntries = null;
-098final ListRegionServerThread 
rsThreads =
-099
utility1.getMiniHBaseCluster().getRegionServerThreads();
-100for (RegionServerThread rs : 
rsThreads) {
-101  
utility1.getAdmin().rollWALWriter(rs.getRegionServer().getServerName());
-102}
-103// Wait for  all log roll to finish
-104utility1.waitFor(3000, new 
Waiter.ExplainingPredicateException() {
-105  @Override
-106  public boolean evaluate() throws 
Exception {
-107for (RegionServerThread rs : 
rsThreads) {
-108  if 
(!rs.getRegionServer().walRollRequestFinished()) {
-109return 

[06/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html 
b/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index 434a274..f7d6df6 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -139,9 +139,9 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.wal.FaultyFSLog.FailureType
 org.apache.hadoop.hbase.wal.IOTestProvider.AllowedOperations
 org.apache.hadoop.hbase.wal.TestWALSplit.Corruptions
+org.apache.hadoop.hbase.wal.FaultyFSLog.FailureType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/overview-tree.html
--
diff --git a/testdevapidocs/overview-tree.html 
b/testdevapidocs/overview-tree.html
index a0bac17..a7e9eab 100644
--- a/testdevapidocs/overview-tree.html
+++ b/testdevapidocs/overview-tree.html
@@ -337,6 +337,7 @@
 org.apache.hadoop.hbase.replication.regionserver.TestReplicator.FailureInjectingReplicationEndpointForTest
 
 
+org.apache.hadoop.hbase.replication.regionserver.TestSerialReplicationEndpoint.TestEndpoint
 
 
 
@@ -2679,6 +2680,7 @@
 org.apache.hadoop.hbase.ipc.TestHBaseClient
 org.apache.hadoop.hbase.TestHBaseConfiguration
 org.apache.hadoop.hbase.TestHBaseConfiguration.ReflectiveCredentialProviderClient
+org.apache.hadoop.hbase.util.TestHBaseFsckCleanReplicationBarriers
 org.apache.hadoop.hbase.util.TestHBaseFsckComparator
 org.apache.hadoop.hbase.util.TestHBaseFsckEncryption
 org.apache.hadoop.hbase.util.TestHBaseFsckReplication
@@ -3258,11 +3260,6 @@
 org.apache.hadoop.hbase.replication.TestReplicationEndpoint.EverythingPassesWALEntryFilterSubclass
 
 
-org.apache.hadoop.hbase.replication.TestReplicationEndpoint.InterClusterReplicationEndpointForTest.DummyReplicator
-
-org.apache.hadoop.hbase.replication.TestReplicationEndpoint.InterClusterReplicationEndpointForTest.FailingDummyReplicator
-
-
 org.apache.hadoop.hbase.master.cleaner.TestReplicationHFileCleaner
 org.apache.hadoop.hbase.master.cleaner.TestReplicationHFileCleaner.DummyServer 
(implements org.apache.hadoop.hbase.Server)
 org.apache.hadoop.hbase.replication.TestReplicationPeerConfig
@@ -3290,12 +3287,6 @@
 org.apache.hadoop.hbase.replication.TestReplicationWithTags
 org.apache.hadoop.hbase.replication.TestReplicationWithTags.TestCoprocessorForTagsAtSink
 (implements org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, 
org.apache.hadoop.hbase.coprocessor.RegionObserver)
 org.apache.hadoop.hbase.replication.TestReplicationWithTags.TestCoprocessorForTagsAtSource
 (implements org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, 
org.apache.hadoop.hbase.coprocessor.RegionObserver)
-org.apache.hadoop.hbase.replication.regionserver.TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingBlockingInterface
 (implements 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface)
-org.apache.hadoop.hbase.replication.regionserver.TestReplicator.ReplicationEndpointForTest.ReplicatorForTest
-
-org.apache.hadoop.hbase.replication.regionserver.TestReplicator.FailureInjectingReplicationEndpointForTest.FailureInjectingReplicatorForTest
-
-
 org.apache.hadoop.hbase.client.TestReplicaWithCluster
 org.apache.hadoop.hbase.client.TestReplicaWithCluster.RegionServerHostingPrimayMetaRegionSlowOrStopCopro
 (implements org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, 
org.apache.hadoop.hbase.coprocessor.RegionObserver)
 org.apache.hadoop.hbase.client.TestReplicaWithCluster.RegionServerStoppedCopro 
(implements org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, 
org.apache.hadoop.hbase.coprocessor.RegionObserver)
@@ -3406,6 +3397,7 @@
 org.apache.hadoop.hbase.TestSequenceIdMonotonicallyIncreasing
 org.apache.hadoop.hbase.TestSerialization
 org.apache.hadoop.hbase.replication.regionserver.TestSerialReplicationChecker
+org.apache.hadoop.hbase.replication.regionserver.TestSerialReplicationEndpoint
 org.apache.hadoop.hbase.master.balancer.TestServerAndLoad
 org.apache.hadoop.hbase.client.TestServerBusyException
 org.apache.hadoop.hbase.client.TestServerBusyException.SleepCoprocessor 
(implements org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, 
org.apache.hadoop.hbase.coprocessor.RegionObserver)


[16/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 

[22/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 

[31/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 

[38/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.html
index c2e0da8..16d68f4 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.html
@@ -157,27 +157,28 @@
 149  Threads.sleep(100);// wait a short 
while for other worker thread to fully exit
 150  boolean allTasksDone = 
workerThreads.values().stream().allMatch(w - w.isFinished());
 151  if (allTasksDone) {
-152
manager.removeRecoveredSource(this);
-153LOG.info("Finished recovering 
queue {} with the following stats: {}", queueId, getStats());
-154  }
-155}
-156  }
-157
-158  @Override
-159  public String getPeerId() {
-160return this.actualPeerId;
-161  }
-162
-163  @Override
-164  public ServerName 
getServerWALsBelongTo() {
-165return 
this.replicationQueueInfo.getDeadRegionServers().get(0);
-166  }
-167
-168  @Override
-169  public boolean isRecovered() {
-170return true;
-171  }
-172}
+152
this.getSourceMetrics().clear();
+153
manager.removeRecoveredSource(this);
+154LOG.info("Finished recovering 
queue {} with the following stats: {}", queueId, getStats());
+155  }
+156}
+157  }
+158
+159  @Override
+160  public String getPeerId() {
+161return this.actualPeerId;
+162  }
+163
+164  @Override
+165  public ServerName 
getServerWALsBelongTo() {
+166return 
this.replicationQueueInfo.getDeadRegionServers().get(0);
+167  }
+168
+169  @Override
+170  public boolean isRecovered() {
+171return true;
+172  }
+173}
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.LogsComparator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.LogsComparator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.LogsComparator.html
index 4eb9011..a99b4a7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.LogsComparator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.LogsComparator.html
@@ -527,109 +527,110 @@
 519}
 520  }
 521}
-522  }
-523
-524  @Override
-525  public String getQueueId() {
-526return this.queueId;
-527  }
-528
-529  @Override
-530  public String getPeerId() {
-531return this.peerId;
-532  }
-533
-534  @Override
-535  public Path getCurrentPath() {
-536// only for testing
-537for (ReplicationSourceShipper worker 
: workerThreads.values()) {
-538  if (worker.getCurrentPath() != 
null) {
-539return worker.getCurrentPath();
-540  }
-541}
-542return null;
-543  }
-544
-545  @Override
-546  public boolean isSourceActive() {
-547return !this.server.isStopped() 
 this.sourceRunning;
-548  }
-549
-550  /**
-551   * Comparator used to compare logs 
together based on their start time
-552   */
-553  public static class LogsComparator 
implements ComparatorPath {
-554
-555@Override
-556public int compare(Path o1, Path o2) 
{
-557  return Long.compare(getTS(o1), 
getTS(o2));
-558}
-559
-560/**
-561 * Split a path to get the start 
time
-562 * For example: 
10.20.20.171%3A60020.1277499063250
-563 * @param p path to split
-564 * @return start time
-565 */
-566private static long getTS(Path p) {
-567  int tsIndex = 
p.getName().lastIndexOf('.') + 1;
-568  return 
Long.parseLong(p.getName().substring(tsIndex));
-569}
-570  }
-571
-572  @Override
-573  public String getStats() {
-574StringBuilder sb = new 
StringBuilder();
-575sb.append("Total replicated edits: 
").append(totalReplicatedEdits)
-576.append(", current progress: 
\n");
-577for (Map.EntryString, 
ReplicationSourceShipper entry : workerThreads.entrySet()) {
-578  String walGroupId = 
entry.getKey();
-579  ReplicationSourceShipper worker = 
entry.getValue();
-580  long position = 
worker.getCurrentPosition();
-581  Path currentPath = 
worker.getCurrentPath();
-582  sb.append("walGroup 
[").append(walGroupId).append("]: ");
-583  if (currentPath != null) {
-584sb.append("currently replicating 
from: ").append(currentPath).append(" at position: ")
-585
.append(position).append("\n");
-586  } else {
-587

[12/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html 
b/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 3168ee3..e159b3f 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -145,8 +145,8 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithFailures.FailurePhase
 org.apache.hadoop.hbase.backup.TestBackupDeleteWithFailures.Failure
+org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithFailures.FailurePhase
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html 
b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
index fc45e57..d865969 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
@@ -5307,18 +5307,22 @@
 
 
 static HBaseClassTestRule
-TestWALEntrySinkFilter.CLASS_RULE
+TestSerialReplicationEndpoint.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestRegionReplicaReplicationEndpoint.CLASS_RULE
+TestWALEntrySinkFilter.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestReplicator.CLASS_RULE
+TestRegionReplicaReplicationEndpoint.CLASS_RULE
 
 
 static HBaseClassTestRule
+TestReplicator.CLASS_RULE
+
+
+static HBaseClassTestRule
 TestRegionReplicaReplicationEndpointNoMaster.CLASS_RULE
 
 
@@ -6225,42 +6229,46 @@
 
 
 static HBaseClassTestRule
-TestCoprocessorScanPolicy.CLASS_RULE
+TestHBaseFsckCleanReplicationBarriers.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestMiniClusterLoadSequential.CLASS_RULE
+TestCoprocessorScanPolicy.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestFromClientSide3WoUnsafe.CLASS_RULE
+TestMiniClusterLoadSequential.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestEncryptionTest.CLASS_RULE
+TestFromClientSide3WoUnsafe.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestCompressionTest.CLASS_RULE
+TestEncryptionTest.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestIdReadWriteLock.CLASS_RULE
+TestCompressionTest.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestBoundedPriorityBlockingQueue.CLASS_RULE
+TestIdReadWriteLock.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestMiniClusterLoadParallel.CLASS_RULE
+TestBoundedPriorityBlockingQueue.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestRegionSplitCalculator.CLASS_RULE
+TestMiniClusterLoadParallel.CLASS_RULE
 
 
 static HBaseClassTestRule
+TestRegionSplitCalculator.CLASS_RULE
+
+
+static HBaseClassTestRule
 TestIncrementingEnvironmentEdge.CLASS_RULE
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html 
b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
index 48f4ec2..bcd3437 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
@@ -3358,14 +3358,18 @@
 TestSerialReplicationChecker.UTIL
 
 
+private static HBaseTestingUtility
+TestSerialReplicationEndpoint.UTIL
+
+
 protected static HBaseTestingUtility
 TestReplicationSourceManager.utility
 
-
+
 private static HBaseTestingUtility
 TestGlobalThrottler.utility1
 
-
+
 private static HBaseTestingUtility
 TestGlobalThrottler.utility2
 
@@ -4307,6 +4311,10 @@
 private static HBaseTestingUtility
 TestConnectionCache.UTIL
 
+
+private static HBaseTestingUtility
+TestHBaseFsckCleanReplicationBarriers.UTIL
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.TestUpdatableReplicationEndpoint.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.TestUpdatableReplicationEndpoint.html
 

[33/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;

[29/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
index e1bc325..63e7421 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 
org.apache.hadoop.security.UserGroupInformation;
-136import 

[23/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 

[32/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 
org.apache.hadoop.ipc.RemoteException;
-135import 

[21/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
index e1bc325..63e7421 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
@@ -66,5125 +66,5224 @@
 058import 
java.util.concurrent.TimeoutException;
 059import 
java.util.concurrent.atomic.AtomicBoolean;
 060import 
java.util.concurrent.atomic.AtomicInteger;
-061import org.apache.commons.io.IOUtils;
-062import 
org.apache.commons.lang3.RandomStringUtils;
-063import 
org.apache.commons.lang3.StringUtils;
-064import 
org.apache.hadoop.conf.Configuration;
-065import 
org.apache.hadoop.conf.Configured;
-066import 
org.apache.hadoop.fs.FSDataOutputStream;
-067import org.apache.hadoop.fs.FileStatus;
-068import org.apache.hadoop.fs.FileSystem;
-069import org.apache.hadoop.fs.Path;
-070import 
org.apache.hadoop.fs.permission.FsAction;
-071import 
org.apache.hadoop.fs.permission.FsPermission;
-072import 
org.apache.hadoop.hbase.Abortable;
-073import org.apache.hadoop.hbase.Cell;
-074import 
org.apache.hadoop.hbase.CellUtil;
-075import 
org.apache.hadoop.hbase.ClusterMetrics;
-076import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-077import 
org.apache.hadoop.hbase.HBaseConfiguration;
-078import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-079import 
org.apache.hadoop.hbase.HConstants;
-080import 
org.apache.hadoop.hbase.HRegionInfo;
-081import 
org.apache.hadoop.hbase.HRegionLocation;
-082import 
org.apache.hadoop.hbase.KeyValue;
-083import 
org.apache.hadoop.hbase.MasterNotRunningException;
-084import 
org.apache.hadoop.hbase.MetaTableAccessor;
-085import 
org.apache.hadoop.hbase.RegionLocations;
-086import 
org.apache.hadoop.hbase.ServerName;
-087import 
org.apache.hadoop.hbase.TableName;
-088import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-089import 
org.apache.hadoop.hbase.client.Admin;
-090import 
org.apache.hadoop.hbase.client.ClusterConnection;
-091import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-092import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-093import 
org.apache.hadoop.hbase.client.Connection;
-094import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-095import 
org.apache.hadoop.hbase.client.Delete;
-096import 
org.apache.hadoop.hbase.client.Get;
-097import 
org.apache.hadoop.hbase.client.Put;
-098import 
org.apache.hadoop.hbase.client.RegionInfo;
-099import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-100import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-101import 
org.apache.hadoop.hbase.client.Result;
-102import 
org.apache.hadoop.hbase.client.RowMutations;
-103import 
org.apache.hadoop.hbase.client.Table;
-104import 
org.apache.hadoop.hbase.client.TableDescriptor;
-105import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-106import 
org.apache.hadoop.hbase.client.TableState;
-107import 
org.apache.hadoop.hbase.io.FileLink;
-108import 
org.apache.hadoop.hbase.io.HFileLink;
-109import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-110import 
org.apache.hadoop.hbase.io.hfile.HFile;
-111import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-112import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-113import 
org.apache.hadoop.hbase.master.RegionState;
-114import 
org.apache.hadoop.hbase.regionserver.HRegion;
-115import 
org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-116import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-117import 
org.apache.hadoop.hbase.replication.ReplicationException;
-118import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-119import 
org.apache.hadoop.hbase.security.UserProvider;
-120import 
org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
-121import 
org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
-122import 
org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-123import 
org.apache.hadoop.hbase.util.hbck.ReplicationChecker;
-124import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
-125import 
org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
-126import org.apache.hadoop.hbase.wal.WAL;
-127import 
org.apache.hadoop.hbase.wal.WALFactory;
-128import 
org.apache.hadoop.hbase.wal.WALSplitter;
-129import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-130import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-131import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-132import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-133import 
org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-134import 

[02/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.FailingDummyReplicator.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.FailingDummyReplicator.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.FailingDummyReplicator.html
deleted file mode 100644
index 7a938de..000
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.InterClusterReplicationEndpointForTest.FailingDummyReplicator.html
+++ /dev/null
@@ -1,632 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/**
-002 * Licensed to the Apache Software 
Foundation (ASF) under one
-003 * or more contributor license 
agreements.  See the NOTICE file
-004 * distributed with this work for 
additional information
-005 * regarding copyright ownership.  The 
ASF licenses this file
-006 * to you under the Apache License, 
Version 2.0 (the
-007 * "License"); you may not use this file 
except in compliance
-008 * with the License.  You may obtain a 
copy of the License at
-009 *
-010 * 
http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or 
agreed to in writing, software
-013 * distributed under the License is 
distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-015 * See the License for the specific 
language governing permissions and
-016 * limitations under the License.
-017 */
-018package 
org.apache.hadoop.hbase.replication;
-019
-020import static org.mockito.Mockito.mock;
-021import static 
org.mockito.Mockito.verify;
-022import static org.mockito.Mockito.when;
-023
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.List;
-027import java.util.UUID;
-028import 
java.util.concurrent.atomic.AtomicBoolean;
-029import 
java.util.concurrent.atomic.AtomicInteger;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import org.apache.hadoop.hbase.Cell;
-032import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-033import org.apache.hadoop.hbase.Waiter;
-034import 
org.apache.hadoop.hbase.client.Connection;
-035import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-036import 
org.apache.hadoop.hbase.client.Put;
-037import 
org.apache.hadoop.hbase.client.RegionInfo;
-038import 
org.apache.hadoop.hbase.client.Table;
-039import 
org.apache.hadoop.hbase.regionserver.HRegion;
-040import 
org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
-041import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource;
-042import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceImpl;
-043import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource;
-044import 
org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl;
-045import 
org.apache.hadoop.hbase.replication.regionserver.MetricsSource;
-046import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-047import 
org.apache.hadoop.hbase.testclassification.ReplicationTests;
-048import 
org.apache.hadoop.hbase.util.Bytes;
-049import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-050import 
org.apache.hadoop.hbase.util.Threads;
-051import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-052import 
org.apache.hadoop.hbase.zookeeper.ZKConfig;
-053import 
org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
-054import org.junit.AfterClass;
-055import org.junit.Assert;
-056import org.junit.Before;
-057import org.junit.BeforeClass;
-058import org.junit.ClassRule;
-059import org.junit.Test;
-060import 
org.junit.experimental.categories.Category;
-061import org.slf4j.Logger;
-062import org.slf4j.LoggerFactory;
-063
-064/**
-065 * Tests ReplicationSource and 
ReplicationEndpoint interactions
-066 */
-067@Category({ ReplicationTests.class, 
MediumTests.class })
-068public class TestReplicationEndpoint 
extends TestReplicationBase {
-069
-070  @ClassRule
-071  public static final HBaseClassTestRule 
CLASS_RULE =
-072  
HBaseClassTestRule.forClass(TestReplicationEndpoint.class);
-073
-074  private static final Logger LOG = 
LoggerFactory.getLogger(TestReplicationEndpoint.class);
-075
-076  static int numRegionServers;
-077
-078  @BeforeClass
-079  public static void setUpBeforeClass() 
throws Exception {
-080
TestReplicationBase.setUpBeforeClass();
-081numRegionServers = 
utility1.getHBaseCluster().getRegionServerThreads().size();
-082  }
-083
-084  @AfterClass
-085  public static void tearDownAfterClass() 
throws Exception {
-086
TestReplicationBase.tearDownAfterClass();
-087// 

[08/29] hbase git commit: HBASE-19083 Introduce a new log writer which can write to two HDFSes

2018-05-04 Thread zhangduo
HBASE-19083 Introduce a new log writer which can write to two HDFSes


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0893bb54
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0893bb54
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0893bb54

Branch: refs/heads/HBASE-19064
Commit: 0893bb5477ebaf546aeed930e3f3d7a14c733a12
Parents: 87f5b5f
Author: zhangduo 
Authored: Thu Jan 11 21:08:02 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../hbase/regionserver/wal/AsyncFSWAL.java  |  21 +--
 .../regionserver/wal/CombinedAsyncWriter.java   | 134 ++
 .../hbase/regionserver/wal/DualAsyncFSWAL.java  |  67 +
 .../wal/AbstractTestProtobufLog.java| 110 +++
 .../regionserver/wal/ProtobufLogTestHelper.java |  99 ++
 .../regionserver/wal/TestAsyncProtobufLog.java  |  32 +
 .../wal/TestCombinedAsyncWriter.java| 136 +++
 .../hbase/regionserver/wal/TestProtobufLog.java |  14 +-
 .../regionserver/wal/WriterOverAsyncWriter.java |  63 +
 9 files changed, 533 insertions(+), 143 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0893bb54/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
index e34818f..0bee9d6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
@@ -607,12 +607,16 @@ public class AsyncFSWAL extends 
AbstractFSWAL {
 }
   }
 
-  @Override
-  protected AsyncWriter createWriterInstance(Path path) throws IOException {
+  protected final AsyncWriter createAsyncWriter(FileSystem fs, Path path) 
throws IOException {
 return AsyncFSWALProvider.createAsyncWriter(conf, fs, path, false, 
eventLoopGroup,
   channelClass);
   }
 
+  @Override
+  protected AsyncWriter createWriterInstance(Path path) throws IOException {
+return createAsyncWriter(fs, path);
+  }
+
   private void waitForSafePoint() {
 consumeLock.lock();
 try {
@@ -632,13 +636,12 @@ public class AsyncFSWAL extends 
AbstractFSWAL {
 }
   }
 
-  private long closeWriter() {
-AsyncWriter oldWriter = this.writer;
-if (oldWriter != null) {
-  long fileLength = oldWriter.getLength();
+  protected final long closeWriter(AsyncWriter writer) {
+if (writer != null) {
+  long fileLength = writer.getLength();
   closeExecutor.execute(() -> {
 try {
-  oldWriter.close();
+  writer.close();
 } catch (IOException e) {
   LOG.warn("close old writer failed", e);
 }
@@ -654,7 +657,7 @@ public class AsyncFSWAL extends AbstractFSWAL {
   throws IOException {
 Preconditions.checkNotNull(nextWriter);
 waitForSafePoint();
-long oldFileLen = closeWriter();
+long oldFileLen = closeWriter(this.writer);
 logRollAndSetupWalProps(oldPath, newPath, oldFileLen);
 this.writer = nextWriter;
 if (nextWriter instanceof AsyncProtobufLogWriter) {
@@ -679,7 +682,7 @@ public class AsyncFSWAL extends AbstractFSWAL {
   @Override
   protected void doShutdown() throws IOException {
 waitForSafePoint();
-closeWriter();
+closeWriter(this.writer);
 closeExecutor.shutdown();
 try {
   if (!closeExecutor.awaitTermination(waitOnShutdownInSeconds, 
TimeUnit.SECONDS)) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/0893bb54/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/CombinedAsyncWriter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/CombinedAsyncWriter.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/CombinedAsyncWriter.java
new file mode 100644
index 000..8ecfede
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/CombinedAsyncWriter.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * 

[25/29] hbase git commit: HBASE-20458 Support removing a WAL from LogRoller

2018-05-04 Thread zhangduo
HBASE-20458 Support removing a WAL from LogRoller


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/20840cc1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/20840cc1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/20840cc1

Branch: refs/heads/HBASE-19064
Commit: 20840cc1c05d558246daa36b5b16022209bbb807
Parents: 007618c
Author: Guanghao Zhang 
Authored: Mon Apr 23 16:31:54 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:54:52 2018 +0800

--
 .../hadoop/hbase/regionserver/LogRoller.java| 29 +--
 .../hbase/regionserver/wal/AbstractFSWAL.java   |  7 +-
 .../regionserver/wal/WALClosedException.java| 47 ++
 .../hbase/regionserver/TestLogRoller.java   | 90 
 .../regionserver/wal/AbstractTestFSWAL.java |  9 ++
 5 files changed, 171 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/20840cc1/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
index 55c5219..ab0083f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.regionserver;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
 import java.util.Map.Entry;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -30,6 +32,7 @@ import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
 import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
+import org.apache.hadoop.hbase.regionserver.wal.WALClosedException;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.HasThread;
 import org.apache.hadoop.hbase.wal.WAL;
@@ -177,17 +180,24 @@ public class LogRoller extends HasThread implements 
Closeable {
   rollLock.lock(); // FindBugs UL_UNRELEASED_LOCK_EXCEPTION_PATH
   try {
 this.lastrolltime = now;
-for (Entry entry : walNeedsRoll.entrySet()) {
+for (Iterator> iter = 
walNeedsRoll.entrySet().iterator(); iter
+.hasNext();) {
+  Entry entry = iter.next();
   final WAL wal = entry.getKey();
   // Force the roll if the logroll.period is elapsed or if a roll was 
requested.
   // The returned value is an array of actual region names.
-  final byte [][] regionsToFlush = wal.rollWriter(periodic ||
-  entry.getValue().booleanValue());
-  walNeedsRoll.put(wal, Boolean.FALSE);
-  if (regionsToFlush != null) {
-for (byte[] r : regionsToFlush) {
-  scheduleFlush(r);
+  try {
+final byte[][] regionsToFlush =
+wal.rollWriter(periodic || entry.getValue().booleanValue());
+walNeedsRoll.put(wal, Boolean.FALSE);
+if (regionsToFlush != null) {
+  for (byte[] r : regionsToFlush) {
+scheduleFlush(r);
+  }
 }
+  } catch (WALClosedException e) {
+LOG.warn("WAL has been closed. Skipping rolling of writer and just 
remove it", e);
+iter.remove();
   }
 }
   } catch (FailedLogCloseException e) {
@@ -252,4 +262,9 @@ public class LogRoller extends HasThread implements 
Closeable {
 running = false;
 interrupt();
   }
+
+  @VisibleForTesting
+  Map getWalNeedsRoll() {
+return this.walNeedsRoll;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/20840cc1/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
index 4816d77..2c0c72b1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
@@ -750,15 +750,14 @@ public abstract class AbstractFSWAL 
implements WAL {
   public byte[][] rollWriter(boolean force) throws FailedLogCloseException, 
IOException 

[03/29] hbase git commit: HBASE-20481 Replicate entries from same region serially in ReplicationEndpoint for serial replication

2018-05-04 Thread zhangduo
HBASE-20481 Replicate entries from same region serially in ReplicationEndpoint 
for serial replication


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6225b4a4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6225b4a4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6225b4a4

Branch: refs/heads/HBASE-19064
Commit: 6225b4a492c40a03475b666915b96984b25b3c47
Parents: 9b9f851
Author: huzheng 
Authored: Wed May 2 10:44:42 2018 +0800
Committer: huzheng 
Committed: Fri May 4 15:22:02 2018 +0800

--
 .../hbase/replication/ReplicationEndpoint.java  |   2 +-
 .../HBaseInterClusterReplicationEndpoint.java   | 281 +-
 .../TestReplicationAdminWithClusters.java   |   1 -
 .../replication/TestReplicationEndpoint.java|  36 +--
 .../regionserver/TestReplicator.java| 288 +++
 .../TestSerialReplicationEndpoint.java  | 188 
 6 files changed, 384 insertions(+), 412 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6225b4a4/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
index 543dc2f..f4c37b1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
@@ -117,7 +117,7 @@ public interface ReplicationEndpoint extends 
ReplicationPeerConfigListener {
   /**
* Initialize the replication endpoint with the given context.
* @param context replication context
-   * @throws IOException
+   * @throws IOException error occur when initialize the endpoint.
*/
   void init(Context context) throws IOException;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/6225b4a4/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
index fd3c671..7db53aa 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
@@ -24,9 +24,9 @@ import java.net.SocketTimeoutException;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CompletionService;
 import java.util.concurrent.ExecutionException;
@@ -37,6 +37,9 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -108,6 +111,7 @@ public class HBaseInterClusterReplicationEndpoint extends 
HBaseReplicationEndpoi
   private boolean replicationBulkLoadDataEnabled;
   private Abortable abortable;
   private boolean dropOnDeletedTables;
+  private boolean isSerial = false;
 
   @Override
   public void init(Context context) throws IOException {
@@ -160,6 +164,7 @@ public class HBaseInterClusterReplicationEndpoint extends 
HBaseReplicationEndpoi
 Path baseNSDir = new Path(HConstants.BASE_NAMESPACE_DIR);
 baseNamespaceDir = new Path(rootDir, baseNSDir);
 hfileArchiveDir = new Path(rootDir, new 
Path(HConstants.HFILE_ARCHIVE_DIRECTORY, baseNSDir));
+isSerial = context.getPeerConfig().isSerial();
   }
 
   private void decorateConf() {
@@ -203,40 +208,60 @@ public class HBaseInterClusterReplicationEndpoint extends 
HBaseReplicationEndpoi
 return sleepMultiplier < maxRetriesMultiplier;
   }
 
-  private List createBatches(final List entries) {
+  private int getEstimatedEntrySize(Entry e) {
+long size = e.getKey().estimatedSerializedSizeOf() + 
e.getEdit().estimatedSerializedSizeOf();
+return (int) size;
+  }
+
+  private List createParallelBatches(final List entries) {
 int numSinks = 

[16/29] hbase git commit: HBASE-20370 Also remove the wal file in remote cluster when we finish replicating a file

2018-05-04 Thread zhangduo
HBASE-20370 Also remove the wal file in remote cluster when we finish 
replicating a file


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d6fbe7e7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d6fbe7e7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d6fbe7e7

Branch: refs/heads/HBASE-19064
Commit: d6fbe7e742142da2fb11f9a6288d642f0a9d2e7e
Parents: f16878e
Author: zhangduo 
Authored: Tue Apr 17 09:04:56 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../hbase/replication/ReplicationUtils.java |  36 ++-
 .../regionserver/ReplicationSource.java |  38 +++
 .../ReplicationSourceInterface.java |  21 +++-
 .../regionserver/ReplicationSourceManager.java  | 108 ++-
 .../regionserver/ReplicationSourceShipper.java  |  27 ++---
 .../hbase/wal/SyncReplicationWALProvider.java   |  11 +-
 .../replication/ReplicationSourceDummy.java |  20 ++--
 .../TestReplicationSourceManager.java   | 101 -
 8 files changed, 246 insertions(+), 116 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d6fbe7e7/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
index cb22f57..66e9b01 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
@@ -22,14 +22,17 @@ import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CompoundConfiguration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Helper class for replication.
@@ -37,6 +40,8 @@ import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Private
 public final class ReplicationUtils {
 
+  private static final Logger LOG = 
LoggerFactory.getLogger(ReplicationUtils.class);
+
   public static final String REPLICATION_ATTR_NAME = "__rep__";
 
   public static final String REMOTE_WAL_DIR_NAME = "remoteWALs";
@@ -176,4 +181,33 @@ public final class ReplicationUtils {
   return tableCFs != null && tableCFs.containsKey(tableName);
 }
   }
+
+  public static FileSystem getRemoteWALFileSystem(Configuration conf, String 
remoteWALDir)
+  throws IOException {
+return new Path(remoteWALDir).getFileSystem(conf);
+  }
+
+  public static Path getRemoteWALDirForPeer(String remoteWALDir, String 
peerId) {
+return new Path(remoteWALDir, peerId);
+  }
+
+  /**
+   * Do the sleeping logic
+   * @param msg Why we sleep
+   * @param sleepForRetries the base sleep time.
+   * @param sleepMultiplier by how many times the default sleeping time is 
augmented
+   * @param maxRetriesMultiplier the max retry multiplier
+   * @return True if sleepMultiplier is  
maxRetriesMultiplier
+   */
+  public static boolean sleepForRetries(String msg, long sleepForRetries, int 
sleepMultiplier,
+  int maxRetriesMultiplier) {
+try {
+  LOG.trace("{}, sleeping {} times {}", msg, sleepForRetries, 
sleepMultiplier);
+  Thread.sleep(sleepForRetries * sleepMultiplier);
+} catch (InterruptedException e) {
+  LOG.debug("Interrupted while sleeping between retries");
+  Thread.currentThread().interrupt();
+}
+return sleepMultiplier < maxRetriesMultiplier;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6fbe7e7/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
index b05a673..01ccb11 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
@@ -89,8 +89,6 @@ public class ReplicationSource implements 

[24/29] hbase git commit: HBASE-19957 General framework to transit sync replication state

2018-05-04 Thread zhangduo
HBASE-19957 General framework to transit sync replication state


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0b2ece74
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0b2ece74
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0b2ece74

Branch: refs/heads/HBASE-19064
Commit: 0b2ece74799e5257144d2abb6543dafd4c46b481
Parents: a857a6b
Author: zhangduo 
Authored: Fri Feb 9 18:33:28 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../replication/ReplicationPeerConfig.java  |   2 -
 .../replication/ReplicationPeerDescription.java |   5 +-
 .../hbase/replication/SyncReplicationState.java |  19 +-
 .../org/apache/hadoop/hbase/HConstants.java |   3 +
 .../src/main/protobuf/MasterProcedure.proto |  20 +-
 .../hbase/replication/ReplicationPeerImpl.java  |  45 -
 .../replication/ReplicationPeerStorage.java |  25 ++-
 .../hbase/replication/ReplicationPeers.java |  27 ++-
 .../replication/ZKReplicationPeerStorage.java   |  63 +--
 .../hbase/coprocessor/MasterObserver.java   |   7 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |   4 +-
 .../hbase/master/MasterCoprocessorHost.java |  12 +-
 .../replication/AbstractPeerProcedure.java  |  14 +-
 .../master/replication/ModifyPeerProcedure.java |  11 --
 .../replication/RefreshPeerProcedure.java   |  18 +-
 .../replication/ReplicationPeerManager.java |  89 +
 ...ransitPeerSyncReplicationStateProcedure.java | 181 ---
 .../hbase/regionserver/HRegionServer.java   |  35 ++--
 .../regionserver/ReplicationSourceService.java  |  11 +-
 .../regionserver/PeerActionListener.java|   4 +-
 .../regionserver/PeerProcedureHandler.java  |  16 +-
 .../regionserver/PeerProcedureHandlerImpl.java  |  52 +-
 .../regionserver/RefreshPeerCallable.java   |   7 +
 .../replication/regionserver/Replication.java   |  22 ++-
 .../regionserver/ReplicationSourceManager.java  |  41 +++--
 .../SyncReplicationPeerInfoProvider.java|  43 +
 .../SyncReplicationPeerInfoProviderImpl.java|  71 
 .../SyncReplicationPeerMappingManager.java  |  48 +
 .../SyncReplicationPeerProvider.java|  35 
 .../hbase/wal/SyncReplicationWALProvider.java   |  35 ++--
 .../org/apache/hadoop/hbase/wal/WALFactory.java |  47 ++---
 .../replication/TestReplicationAdmin.java   |   3 +-
 .../TestReplicationSourceManager.java   |   5 +-
 .../wal/TestSyncReplicationWALProvider.java |  36 ++--
 34 files changed, 743 insertions(+), 313 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0b2ece74/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
index 997a155..cc7b4bc 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.replication;
 
 import java.util.Collection;
@@ -25,7 +24,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
-
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.util.Bytes;

http://git-wip-us.apache.org/repos/asf/hbase/blob/0b2ece74/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
index 2d077c5..b0c27bb 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
@@ -20,7 +20,10 @@ package org.apache.hadoop.hbase.replication;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
- * The POJO equivalent of ReplicationProtos.ReplicationPeerDescription
+ * The POJO equivalent of ReplicationProtos.ReplicationPeerDescription.
+ * 
+ * To developer, here we do not store the new sync replication state since it 
is just an
+ * intermediate state and this class is 

[19/29] hbase git commit: HBASE-19781 Add a new cluster state flag for synchronous replication

2018-05-04 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/502a9543/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java
index 8911982..f5eca39 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java
@@ -28,6 +28,7 @@ import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.replication.ReplicationPeerStorage;
 import org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
 import org.apache.hadoop.hbase.replication.ReplicationStorageFactory;
+import org.apache.hadoop.hbase.replication.SyncReplicationState;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.ReplicationTests;
 import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
@@ -67,9 +68,9 @@ public class TestHBaseFsckReplication {
 String peerId1 = "1";
 String peerId2 = "2";
 peerStorage.addPeer(peerId1, 
ReplicationPeerConfig.newBuilder().setClusterKey("key").build(),
-  true);
+  true, SyncReplicationState.NONE);
 peerStorage.addPeer(peerId2, 
ReplicationPeerConfig.newBuilder().setClusterKey("key").build(),
-  true);
+  true, SyncReplicationState.NONE);
 for (int i = 0; i < 10; i++) {
   queueStorage.addWAL(ServerName.valueOf("localhost", 1 + i, 10 + 
i), peerId1,
 "file-" + i);

http://git-wip-us.apache.org/repos/asf/hbase/blob/502a9543/hbase-shell/src/main/ruby/hbase/replication_admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/replication_admin.rb 
b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
index d1f1344..5f86365 100644
--- a/hbase-shell/src/main/ruby/hbase/replication_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
@@ -20,6 +20,7 @@
 include Java
 
 java_import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil
+java_import org.apache.hadoop.hbase.replication.SyncReplicationState
 java_import org.apache.hadoop.hbase.replication.ReplicationPeerConfig
 java_import org.apache.hadoop.hbase.util.Bytes
 java_import org.apache.hadoop.hbase.zookeeper.ZKConfig
@@ -338,6 +339,20 @@ module Hbase
   '!' + ReplicationPeerConfigUtil.convertToString(tableCFs)
 end
 
+# Transit current cluster to a new state in the specified synchronous
+# replication peer
+def transit_peer_sync_replication_state(id, state)
+  if 'ACTIVE'.eql?(state)
+@admin.transitReplicationPeerSyncReplicationState(id, 
SyncReplicationState::ACTIVE)
+  elsif 'DOWNGRADE_ACTIVE'.eql?(state)
+@admin.transitReplicationPeerSyncReplicationState(id, 
SyncReplicationState::DOWNGRADE_ACTIVE)
+  elsif 'STANDBY'.eql?(state)
+@admin.transitReplicationPeerSyncReplicationState(id, 
SyncReplicationState::STANDBY)
+  else
+raise(ArgumentError, 'synchronous replication state must be ACTIVE, 
DOWNGRADE_ACTIVE or STANDBY')
+  end
+end
+
 
#--
 # Enables a table's replication switch
 def enable_tablerep(table_name)

http://git-wip-us.apache.org/repos/asf/hbase/blob/502a9543/hbase-shell/src/main/ruby/shell.rb
--
diff --git a/hbase-shell/src/main/ruby/shell.rb 
b/hbase-shell/src/main/ruby/shell.rb
index ab07a79..ba4d154 100644
--- a/hbase-shell/src/main/ruby/shell.rb
+++ b/hbase-shell/src/main/ruby/shell.rb
@@ -391,6 +391,7 @@ Shell.load_command_group(
 get_peer_config
 list_peer_configs
 update_peer_config
+transit_peer_sync_replication_state
   ]
 )
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/502a9543/hbase-shell/src/main/ruby/shell/commands/list_peers.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/list_peers.rb 
b/hbase-shell/src/main/ruby/shell/commands/list_peers.rb
index f3ab749..f2ec014 100644
--- a/hbase-shell/src/main/ruby/shell/commands/list_peers.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/list_peers.rb
@@ -39,8 +39,8 @@ EOF
 peers = replication_admin.list_peers
 
 formatter.header(%w[PEER_ID CLUSTER_KEY ENDPOINT_CLASSNAME
-REMOTE_ROOT_DIR STATE REPLICATE_ALL 
-NAMESPACES TABLE_CFS BANDWIDTH
+REMOTE_ROOT_DIR SYNC_REPLICATION_STATE STATE
+REPLICATE_ALL NAMESPACES TABLE_CFS BANDWIDTH

[07/29] hbase git commit: HBASE-19973 Implement a procedure to replay sync replication wal for standby cluster

2018-05-04 Thread zhangduo
HBASE-19973 Implement a procedure to replay sync replication wal for standby 
cluster


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/91d4eff4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/91d4eff4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/91d4eff4

Branch: refs/heads/HBASE-19064
Commit: 91d4eff40ded296a7e86c523f28f9bf3eac115ad
Parents: b5568d3
Author: Guanghao Zhang 
Authored: Fri Mar 2 18:43:25 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../src/main/protobuf/MasterProcedure.proto |  22 +++
 .../apache/hadoop/hbase/executor/EventType.java |   9 +-
 .../hadoop/hbase/executor/ExecutorType.java |   3 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |   9 +
 .../hadoop/hbase/master/MasterServices.java |   6 +
 .../procedure/PeerProcedureInterface.java   |   3 +-
 .../hbase/master/procedure/PeerQueue.java   |   3 +-
 .../replication/RecoverStandbyProcedure.java| 114 +++
 .../ReplaySyncReplicationWALManager.java| 139 +
 .../ReplaySyncReplicationWALProcedure.java  | 193 +++
 .../hbase/regionserver/HRegionServer.java   |   9 +-
 .../ReplaySyncReplicationWALCallable.java   | 149 ++
 .../SyncReplicationPeerInfoProviderImpl.java|   3 +
 .../org/apache/hadoop/hbase/util/FSUtils.java   |   5 +
 .../hbase/master/MockNoopMasterServices.java|   8 +-
 .../master/TestRecoverStandbyProcedure.java | 186 ++
 16 files changed, 854 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/91d4eff4/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto 
b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
index e8b940e..01e4dae 100644
--- a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
@@ -459,3 +459,25 @@ message TransitPeerSyncReplicationStateStateData {
   optional SyncReplicationState fromState = 1;
   required SyncReplicationState toState = 2;
 }
+
+enum RecoverStandbyState {
+  RENAME_SYNC_REPLICATION_WALS_DIR = 1;
+  INIT_WORKERS = 2;
+  DISPATCH_TASKS = 3;
+  REMOVE_SYNC_REPLICATION_WALS_DIR = 4;
+}
+
+message RecoverStandbyStateData {
+  required string peer_id = 1;
+}
+
+message ReplaySyncReplicationWALStateData {
+  required string peer_id = 1;
+  required string wal = 2;
+  optional ServerName target_server = 3;
+}
+
+message ReplaySyncReplicationWALParameter {
+  required string peer_id = 1;
+  required string wal = 2;
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/91d4eff4/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventType.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventType.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventType.java
index 922deb8..ad38d1c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventType.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventType.java
@@ -281,7 +281,14 @@ public enum EventType {
*
* RS_REFRESH_PEER
*/
-  RS_REFRESH_PEER (84, ExecutorType.RS_REFRESH_PEER);
+  RS_REFRESH_PEER(84, ExecutorType.RS_REFRESH_PEER),
+
+  /**
+   * RS replay sync replication wal.
+   *
+   * RS_REPLAY_SYNC_REPLICATION_WAL
+   */
+  RS_REPLAY_SYNC_REPLICATION_WAL(85, 
ExecutorType.RS_REPLAY_SYNC_REPLICATION_WAL);
 
   private final int code;
   private final ExecutorType executor;

http://git-wip-us.apache.org/repos/asf/hbase/blob/91d4eff4/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorType.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorType.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorType.java
index 7f130d1..ea97354 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorType.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorType.java
@@ -47,7 +47,8 @@ public enum ExecutorType {
   RS_REGION_REPLICA_FLUSH_OPS  (28),
   RS_COMPACTED_FILES_DISCHARGER (29),
   RS_OPEN_PRIORITY_REGION(30),
-  RS_REFRESH_PEER   (31);
+  RS_REFRESH_PEER(31),
+  RS_REPLAY_SYNC_REPLICATION_WAL(32);
 
   ExecutorType(int value) {
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/91d4eff4/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java

[05/29] hbase git commit: HBASE-19990 Create remote wal directory when transitting to state S

2018-05-04 Thread zhangduo
HBASE-19990 Create remote wal directory when transitting to state S


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ff1e79a1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ff1e79a1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ff1e79a1

Branch: refs/heads/HBASE-19064
Commit: ff1e79a19a3e9ad80ba857028dff716a36f32dae
Parents: 22e1b9f
Author: zhangduo 
Authored: Wed Feb 14 16:01:16 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../procedure2/ProcedureYieldException.java |  9 --
 .../hbase/replication/ReplicationUtils.java |  2 ++
 .../hadoop/hbase/master/MasterFileSystem.java   | 19 ++---
 .../master/procedure/MasterProcedureEnv.java|  5 
 ...ransitPeerSyncReplicationStateProcedure.java | 29 
 .../hbase/replication/TestSyncReplication.java  |  8 ++
 6 files changed, 55 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ff1e79a1/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureYieldException.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureYieldException.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureYieldException.java
index 0487ac5b..dbb9981 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureYieldException.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureYieldException.java
@@ -15,16 +15,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.procedure2;
 
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
 
-// TODO: Not used yet
+/**
+ * Indicate that a procedure wants to be rescheduled. Usually because there 
are something wrong but
+ * we do not want to fail the procedure.
+ * 
+ * TODO: need to support scheduling after a delay.
+ */
 @InterfaceAudience.Private
 @InterfaceStability.Stable
 public class ProcedureYieldException extends ProcedureException {
+
   /** default constructor */
   public ProcedureYieldException() {
 super();

http://git-wip-us.apache.org/repos/asf/hbase/blob/ff1e79a1/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
index d94cb00..e402d0f 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
@@ -41,6 +41,8 @@ public final class ReplicationUtils {
 
   public static final String REPLICATION_ATTR_NAME = "__rep__";
 
+  public static final String REMOTE_WAL_DIR_NAME = "remoteWALs";
+
   private ReplicationUtils() {
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/ff1e79a1/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
index 864be02..7ccbd71 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.log.HBaseMarkers;
 import org.apache.hadoop.hbase.mob.MobConstants;
 import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
 import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.replication.ReplicationUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -133,7 +134,6 @@ public class MasterFileSystem {
* Idempotent.
*/
   private void createInitialFileSystemLayout() throws IOException {
-
 final String[] protectedSubDirs = new String[] {
 HConstants.BASE_NAMESPACE_DIR,
 HConstants.HFILE_ARCHIVE_DIRECTORY,
@@ -145,7 +145,8 @@ public class MasterFileSystem {
   HConstants.HREGION_LOGDIR_NAME,
   HConstants.HREGION_OLDLOGDIR_NAME,
   HConstants.CORRUPT_DIR_NAME,
-  WALProcedureStore.MASTER_PROCEDURE_LOGDIR
+  

[14/29] hbase git commit: HBASE-19782 Reject the replication request when peer is DA or A state

2018-05-04 Thread zhangduo
HBASE-19782 Reject the replication request when peer is DA or A state


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/315e4873
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/315e4873
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/315e4873

Branch: refs/heads/HBASE-19064
Commit: 315e4873e64bda243ca06edd0e4962a98793d892
Parents: d6fbe7e
Author: huzheng 
Authored: Fri Mar 2 18:05:29 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../hbase/protobuf/ReplicationProtbufUtil.java  |  2 +-
 .../hadoop/hbase/regionserver/HRegion.java  |  2 +-
 .../hbase/regionserver/HRegionServer.java   |  5 +--
 .../hbase/regionserver/RSRpcServices.java   | 25 +--
 .../RejectReplicationRequestStateChecker.java   | 45 
 .../ReplaySyncReplicationWALCallable.java   | 24 ++-
 .../replication/regionserver/Replication.java   |  2 +-
 .../regionserver/ReplicationSink.java   | 16 +++
 .../SyncReplicationPeerInfoProvider.java| 11 ++---
 .../SyncReplicationPeerInfoProviderImpl.java| 13 +++---
 .../SyncReplicationPeerMappingManager.java  |  5 +--
 .../hbase/wal/SyncReplicationWALProvider.java   |  7 +--
 .../replication/SyncReplicationTestBase.java| 32 ++
 .../replication/TestSyncReplicationActive.java  | 13 +-
 .../regionserver/TestReplicationSink.java   |  5 +--
 .../regionserver/TestWALEntrySinkFilter.java|  3 +-
 .../wal/TestSyncReplicationWALProvider.java |  6 +--
 17 files changed, 163 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/315e4873/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
index 81dd59e..e01f881 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.wal.WAL.Entry;
 import org.apache.hadoop.hbase.wal.WALKeyImpl;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.SizedCellScanner;
@@ -45,7 +46,6 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminServic
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.hbase.wal.WAL.Entry;
 
 @InterfaceAudience.Private
 public class ReplicationProtbufUtil {

http://git-wip-us.apache.org/repos/asf/hbase/blob/315e4873/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 1865144..cb7ba6e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -1981,7 +1981,7 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   private boolean shouldForbidMajorCompaction() {
 if (rsServices != null && rsServices.getReplicationSourceService() != 
null) {
   return 
rsServices.getReplicationSourceService().getSyncReplicationPeerInfoProvider()
-  .checkState(getRegionInfo(), ForbidMajorCompactionChecker.get());
+  .checkState(getRegionInfo().getTable(), 
ForbidMajorCompactionChecker.get());
 }
 return false;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/315e4873/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 4dd8f09..5963cd9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -2478,10 +2478,9 @@ public class 

[10/29] hbase git commit: HBASE-19747 Introduce a special WALProvider for synchronous replication

2018-05-04 Thread zhangduo
HBASE-19747 Introduce a special WALProvider for synchronous replication


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/70212cec
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/70212cec
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/70212cec

Branch: refs/heads/HBASE-19064
Commit: 70212cec1a60597e824fd7d062ebc2f684ee449b
Parents: 47694cf
Author: zhangduo 
Authored: Fri Jan 19 18:38:39 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../hbase/regionserver/wal/AbstractFSWAL.java   |   7 +
 .../hbase/regionserver/wal/AsyncFSWAL.java  |   1 -
 .../hbase/regionserver/wal/DualAsyncFSWAL.java  |   4 +-
 .../hadoop/hbase/regionserver/wal/FSHLog.java   |   4 -
 .../regionserver/PeerActionListener.java|  33 +++
 .../SynchronousReplicationPeerProvider.java |  35 +++
 .../hadoop/hbase/wal/AbstractFSWALProvider.java |   1 +
 .../hadoop/hbase/wal/AsyncFSWALProvider.java|  18 +-
 .../hbase/wal/NettyAsyncFSWALConfigHelper.java  |   8 +-
 .../hbase/wal/RegionGroupingProvider.java   |  13 +-
 .../wal/SynchronousReplicationWALProvider.java  | 225 +++
 .../org/apache/hadoop/hbase/wal/WALFactory.java |  37 ++-
 .../org/apache/hadoop/hbase/wal/WALKeyImpl.java |  16 +-
 .../regionserver/TestCompactionPolicy.java  |   1 +
 .../regionserver/TestFailedAppendAndSync.java   | 122 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |  24 +-
 .../TestHRegionWithInMemoryFlush.java   |   7 -
 .../hbase/regionserver/TestRegionIncrement.java |  20 +-
 .../hbase/regionserver/TestWALLockup.java   |   1 +
 .../regionserver/wal/AbstractTestWALReplay.java |   1 +
 .../regionserver/wal/ProtobufLogTestHelper.java |  44 +++-
 .../hbase/regionserver/wal/TestAsyncFSWAL.java  |  13 +-
 .../regionserver/wal/TestAsyncWALReplay.java|   4 +-
 .../wal/TestCombinedAsyncWriter.java|   3 +-
 .../hbase/regionserver/wal/TestFSHLog.java  |  15 +-
 .../hbase/regionserver/wal/TestWALReplay.java   |   1 +
 .../apache/hadoop/hbase/wal/IOTestProvider.java |   2 -
 .../TestSynchronousReplicationWALProvider.java  | 153 +
 28 files changed, 659 insertions(+), 154 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/70212cec/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
index ce8dafa..4816d77 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
@@ -430,6 +430,13 @@ public abstract class AbstractFSWAL 
implements WAL {
 this.implClassName = getClass().getSimpleName();
   }
 
+  /**
+   * Used to initialize the WAL. Usually just call rollWriter to create the 
first log writer.
+   */
+  public void init() throws IOException {
+rollWriter();
+  }
+
   @Override
   public void registerWALActionsListener(WALActionsListener listener) {
 this.listeners.add(listener);

http://git-wip-us.apache.org/repos/asf/hbase/blob/70212cec/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
index 0bee9d6..17133ed 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
@@ -248,7 +248,6 @@ public class AsyncFSWAL extends AbstractFSWAL {
 batchSize = conf.getLong(WAL_BATCH_SIZE, DEFAULT_WAL_BATCH_SIZE);
 waitOnShutdownInSeconds = 
conf.getInt(ASYNC_WAL_WAIT_ON_SHUTDOWN_IN_SECONDS,
   DEFAULT_ASYNC_WAL_WAIT_ON_SHUTDOWN_IN_SECONDS);
-rollWriter();
   }
 
   private static boolean waitingRoll(int epochAndState) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/70212cec/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/DualAsyncFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/DualAsyncFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/DualAsyncFSWAL.java
index 42b0dae..0495337 100644
--- 

[29/29] hbase git commit: HBASE-20456 Support removing a ReplicationSourceShipper for a special wal group

2018-05-04 Thread zhangduo
HBASE-20456 Support removing a ReplicationSourceShipper for a special wal group


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/75046eeb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/75046eeb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/75046eeb

Branch: refs/heads/HBASE-19064
Commit: 75046eebf6d8bfaabdbdb8c2f16f8930ce892441
Parents: b564486
Author: zhangduo 
Authored: Tue Apr 24 22:01:21 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:54:52 2018 +0800

--
 .../hbase/regionserver/wal/AsyncFSWAL.java  |  1 +
 .../RecoveredReplicationSource.java | 13 +---
 .../RecoveredReplicationSourceShipper.java  |  7 --
 .../regionserver/ReplicationSource.java | 13 +++-
 .../regionserver/ReplicationSourceManager.java  | 19 -
 .../regionserver/ReplicationSourceShipper.java  | 20 +++--
 .../ReplicationSourceWALReader.java |  9 ++-
 .../regionserver/WALEntryStream.java|  3 +-
 .../hadoop/hbase/wal/AbstractFSWALProvider.java | 28 ---
 .../hbase/wal/SyncReplicationWALProvider.java   | 10 ++-
 .../TestReplicationSourceManager.java   |  5 +-
 .../TestSyncReplicationShipperQuit.java | 81 
 .../regionserver/TestWALEntryStream.java|  4 +-
 13 files changed, 163 insertions(+), 50 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/75046eeb/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
index 17133ed..f630e63 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
@@ -682,6 +682,7 @@ public class AsyncFSWAL extends AbstractFSWAL {
   protected void doShutdown() throws IOException {
 waitForSafePoint();
 closeWriter(this.writer);
+this.writer = null;
 closeExecutor.shutdown();
 try {
   if (!closeExecutor.awaitTermination(waitOnShutdownInSeconds, 
TimeUnit.SECONDS)) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/75046eeb/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
index a21ca44..f1bb538 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.replication.ReplicationPeer;
 import org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
 import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
@@ -144,15 +143,9 @@ public class RecoveredReplicationSource extends 
ReplicationSource {
   }
 
   void tryFinish() {
-// use synchronize to make sure one last thread will clean the queue
-synchronized (workerThreads) {
-  Threads.sleep(100);// wait a short while for other worker thread to 
fully exit
-  boolean allTasksDone = workerThreads.values().stream().allMatch(w -> 
w.isFinished());
-  if (allTasksDone) {
-this.getSourceMetrics().clear();
-manager.removeRecoveredSource(this);
-LOG.info("Finished recovering queue {} with the following stats: {}", 
queueId, getStats());
-  }
+if (workerThreads.isEmpty()) {
+  this.getSourceMetrics().clear();
+  manager.finishRecoveredSource(this);
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/75046eeb/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java
index 91109cf..b0d4db0 100644
--- 

[23/29] hbase git commit: HBASE-19957 General framework to transit sync replication state

2018-05-04 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/0b2ece74/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/SyncReplicationPeerInfoProvider.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/SyncReplicationPeerInfoProvider.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/SyncReplicationPeerInfoProvider.java
new file mode 100644
index 000..92f2c52
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/SyncReplicationPeerInfoProvider.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.replication.regionserver;
+
+import java.util.Optional;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.replication.SyncReplicationState;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Get the information for a sync replication peer.
+ */
+@InterfaceAudience.Private
+public interface SyncReplicationPeerInfoProvider {
+
+  /**
+   * Return the peer id and remote WAL directory if the region is 
synchronously replicated and the
+   * state is {@link SyncReplicationState#ACTIVE}.
+   */
+  Optional<Pair<String, String>> getPeerIdAndRemoteWALDir(RegionInfo info);
+
+  /**
+   * Check whether the give region is contained in a sync replication peer 
which is in the given
+   * state.
+   */
+  boolean isInState(RegionInfo info, SyncReplicationState state);
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/0b2ece74/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/SyncReplicationPeerInfoProviderImpl.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/SyncReplicationPeerInfoProviderImpl.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/SyncReplicationPeerInfoProviderImpl.java
new file mode 100644
index 000..32159e6
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/SyncReplicationPeerInfoProviderImpl.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.replication.regionserver;
+
+import java.util.Optional;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.replication.ReplicationPeer;
+import org.apache.hadoop.hbase.replication.ReplicationPeers;
+import org.apache.hadoop.hbase.replication.SyncReplicationState;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
+class SyncReplicationPeerInfoProviderImpl implements 
SyncReplicationPeerInfoProvider {
+
+  private final ReplicationPeers replicationPeers;
+
+  private final SyncReplicationPeerMappingManager mapping;
+
+  SyncReplicationPeerInfoProviderImpl(ReplicationPeers replicationPeers,
+  SyncReplicationPeerMappingManager mapping) {
+this.replicationPeers = replicationPeers;
+this.mapping = mapping;
+  }
+
+  @Override
+  public Optional<Pair<String, String>> getPeerIdAndRemoteWALDir(RegionInfo 
info) {
+String peerId = mapping.getPeerId(info);
+if (peerId == null) {
+  return Optional.empty();
+   

[17/29] hbase git commit: HBASE-19079 Support setting up two clusters with A and S stat

2018-05-04 Thread zhangduo
HBASE-19079 Support setting up two clusters with A and S stat


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/982e5566
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/982e5566
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/982e5566

Branch: refs/heads/HBASE-19064
Commit: 982e5566dbefb08e49eb7f1b82e312ec247deffd
Parents: 9f3b31b
Author: zhangduo 
Authored: Tue Apr 10 22:35:19 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../replication/ReplicationPeerManager.java |   5 +-
 ...ransitPeerSyncReplicationStateProcedure.java |   2 +-
 .../hbase/regionserver/wal/DualAsyncFSWAL.java  |  14 ++
 .../hadoop/hbase/regionserver/wal/WALUtil.java  |  25 ++-
 .../hbase/replication/ChainWALEntryFilter.java  |  28 +--
 .../ReplaySyncReplicationWALCallable.java   |  27 ++-
 .../SyncReplicationPeerInfoProviderImpl.java|   6 +-
 .../hadoop/hbase/wal/AbstractFSWALProvider.java |  10 +-
 .../hbase/wal/SyncReplicationWALProvider.java   |  94 ++---
 .../org/apache/hadoop/hbase/wal/WALEdit.java|   8 +-
 .../org/apache/hadoop/hbase/wal/WALFactory.java |   2 +-
 .../replication/TestReplicationAdmin.java   |  33 +--
 .../regionserver/wal/TestWALDurability.java |   2 +
 .../replication/SyncReplicationTestBase.java| 185 +
 .../hbase/replication/TestSyncReplication.java  | 207 ---
 .../replication/TestSyncReplicationActive.java  |  64 ++
 .../replication/TestSyncReplicationStandBy.java |  96 +
 17 files changed, 521 insertions(+), 287 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/982e5566/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
index 41dd6e3..229549e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
@@ -68,8 +68,9 @@ public class ReplicationPeerManager {
 
  private final ImmutableMap<SyncReplicationState, EnumSet<SyncReplicationState>>
 allowedTransition = 
Maps.immutableEnumMap(ImmutableMap.of(SyncReplicationState.ACTIVE,
-  EnumSet.of(SyncReplicationState.DOWNGRADE_ACTIVE), 
SyncReplicationState.STANDBY,
-  EnumSet.of(SyncReplicationState.DOWNGRADE_ACTIVE), 
SyncReplicationState.DOWNGRADE_ACTIVE,
+  EnumSet.of(SyncReplicationState.DOWNGRADE_ACTIVE, 
SyncReplicationState.STANDBY),
+  SyncReplicationState.STANDBY, 
EnumSet.of(SyncReplicationState.DOWNGRADE_ACTIVE),
+  SyncReplicationState.DOWNGRADE_ACTIVE,
   EnumSet.of(SyncReplicationState.STANDBY, SyncReplicationState.ACTIVE)));
 
   ReplicationPeerManager(ReplicationPeerStorage peerStorage, 
ReplicationQueueStorage queueStorage,

http://git-wip-us.apache.org/repos/asf/hbase/blob/982e5566/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
index cc51890..5da2b0c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
@@ -171,7 +171,7 @@ public class TransitPeerSyncReplicationStateProcedure
 }
 return Flow.HAS_MORE_STATE;
   case REPLAY_REMOTE_WAL_IN_PEER:
-// TODO: replay remote wal when transiting from S to DA.
+addChildProcedure(new RecoverStandbyProcedure(peerId));
 
setNextState(PeerSyncReplicationStateTransitionState.REOPEN_ALL_REGIONS_IN_PEER);
 return Flow.HAS_MORE_STATE;
   case REOPEN_ALL_REGIONS_IN_PEER:

http://git-wip-us.apache.org/repos/asf/hbase/blob/982e5566/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/DualAsyncFSWAL.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/DualAsyncFSWAL.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/DualAsyncFSWAL.java
index 0495337..a98567a 100644
--- 

[01/29] hbase git commit: HBASE-20507 Do not need to call recoverLease on the broken file when we fail to create a wal writer [Forced Update!]

2018-05-04 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/HBASE-19064 ab8380eaf -> 6b0c20ffd (forced update)


HBASE-20507 Do not need to call recoverLease on the broken file when we fail to 
create a wal writer


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/09ca6193
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/09ca6193
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/09ca6193

Branch: refs/heads/HBASE-19064
Commit: 09ca6193890e8caffeb2951340b6a6fc10b5cfa0
Parents: 4cb444e
Author: Michael Stack 
Authored: Thu May 3 12:26:12 2018 -0700
Committer: Michael Stack 
Committed: Thu May 3 12:30:13 2018 -0700

--
 .../hbase/io/asyncfs/TestOverwriteFileUnderConstruction.java  | 3 +++
 1 file changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/09ca6193/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestOverwriteFileUnderConstruction.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestOverwriteFileUnderConstruction.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestOverwriteFileUnderConstruction.java
index 7f75fdb..aaeed15 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestOverwriteFileUnderConstruction.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestOverwriteFileUnderConstruction.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
 
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -95,6 +96,8 @@ public class TestOverwriteFileUnderConstruction {
   out1.close();
   // a successful close is also OK for us so no assertion here, we just 
need to confirm that the
   // data in the file are correct.
+} catch (FileNotFoundException fnfe) {
+  // hadoop3 throws one of these.
 } catch (RemoteException e) {
   // expected
   assertThat(e.unwrapRemoteException(), 
instanceOf(LeaseExpiredException.class));



[21/29] hbase git commit: HBASE-20163 Forbid major compaction when standby cluster replay the remote wals

2018-05-04 Thread zhangduo
HBASE-20163 Forbid major compaction when standby cluster replay the remote wals


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f16878e0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f16878e0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f16878e0

Branch: refs/heads/HBASE-19064
Commit: f16878e03f8075a41645cba75d1ba2ee010c0297
Parents: 982e556
Author: Guanghao Zhang 
Authored: Thu Apr 12 14:44:25 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../hadoop/hbase/regionserver/HRegion.java  | 18 
 .../hbase/regionserver/HRegionServer.java   |  2 +-
 .../regionserver/RegionServerServices.java  |  5 +++
 .../ForbidMajorCompactionChecker.java   | 44 
 .../hadoop/hbase/MockRegionServerServices.java  |  6 +++
 .../hadoop/hbase/master/MockRegionServer.java   |  6 +++
 6 files changed, 80 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f16878e0/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index ea72cfe..1865144 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -144,6 +144,7 @@ import 
org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
 import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
 import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
+import 
org.apache.hadoop.hbase.regionserver.compactions.ForbidMajorCompactionChecker;
 import 
org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;
 import 
org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
 import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;
@@ -1977,6 +1978,14 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 return compact(compaction, store, throughputController, null);
   }
 
+  private boolean shouldForbidMajorCompaction() {
+if (rsServices != null && rsServices.getReplicationSourceService() != 
null) {
+  return 
rsServices.getReplicationSourceService().getSyncReplicationPeerInfoProvider()
+  .checkState(getRegionInfo(), ForbidMajorCompactionChecker.get());
+}
+return false;
+  }
+
   public boolean compact(CompactionContext compaction, HStore store,
   ThroughputController throughputController, User user) throws IOException 
{
 assert compaction != null && compaction.hasSelection();
@@ -1986,6 +1995,15 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   store.cancelRequestedCompaction(compaction);
   return false;
 }
+
+if (compaction.getRequest().isAllFiles() && shouldForbidMajorCompaction()) 
{
+  LOG.warn("Skipping major compaction on " + this
+  + " because this cluster is transiting sync replication state"
+  + " from STANDBY to DOWNGRADE_ACTIVE");
+  store.cancelRequestedCompaction(compaction);
+  return false;
+}
+
 MonitoredTask status = null;
 boolean requestNeedsCancellation = true;
 /*

http://git-wip-us.apache.org/repos/asf/hbase/blob/f16878e0/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 25bc6be..4dd8f09 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -2472,7 +2472,7 @@ public class HRegionServer extends HasThread implements
* @return Return the object that implements the replication
* source executorService.
*/
-  @VisibleForTesting
+  @Override
   public ReplicationSourceService getReplicationSourceService() {
 return replicationSourceHandler;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/f16878e0/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java
--
diff --git 

[26/29] hbase git commit: HBASE-20432 Cleanup related resources when remove a sync replication peer

2018-05-04 Thread zhangduo
HBASE-20432 Cleanup related resources when remove a sync replication peer


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/94712957
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/94712957
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/94712957

Branch: refs/heads/HBASE-19064
Commit: 9471295786c56165804d605ddb33b582b00b7cdb
Parents: 20840cc
Author: huzheng 
Authored: Wed Apr 18 20:38:33 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:54:52 2018 +0800

--
 .../master/replication/RemovePeerProcedure.java | 10 +
 .../ReplaySyncReplicationWALManager.java|  8 
 .../replication/SyncReplicationTestBase.java| 45 +---
 .../replication/TestSyncReplicationActive.java  |  9 ++--
 .../replication/TestSyncReplicationStandBy.java | 31 --
 5 files changed, 89 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/94712957/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/RemovePeerProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/RemovePeerProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/RemovePeerProcedure.java
index 82dc07e..7335fe0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/RemovePeerProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/RemovePeerProcedure.java
@@ -66,9 +66,19 @@ public class RemovePeerProcedure extends ModifyPeerProcedure 
{
 env.getReplicationPeerManager().removePeer(peerId);
   }
 
+  private void removeRemoteWALs(MasterProcedureEnv env) throws IOException {
+ReplaySyncReplicationWALManager remoteWALManager =
+env.getMasterServices().getReplaySyncReplicationWALManager();
+remoteWALManager.removePeerRemoteWALs(peerId);
+remoteWALManager.removePeerReplayWALDir(peerId);
+  }
+
   @Override
   protected void postPeerModification(MasterProcedureEnv env)
   throws IOException, ReplicationException {
+if (peerConfig.isSyncReplication()) {
+  removeRemoteWALs(env);
+}
 env.getReplicationPeerManager().removeAllQueuesAndHFileRefs(peerId);
 if (peerConfig.isSerial()) {
   env.getReplicationPeerManager().removeAllLastPushedSeqIds(peerId);

http://git-wip-us.apache.org/repos/asf/hbase/blob/94712957/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplaySyncReplicationWALManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplaySyncReplicationWALManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplaySyncReplicationWALManager.java
index 72f5c37..eac5aa4 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplaySyncReplicationWALManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplaySyncReplicationWALManager.java
@@ -115,6 +115,14 @@ public class ReplaySyncReplicationWALManager {
 }
   }
 
+  public void removePeerRemoteWALs(String peerId) throws IOException {
+Path remoteWALDir = getPeerRemoteWALDir(peerId);
+if (fs.exists(remoteWALDir) && !fs.delete(remoteWALDir, true)) {
+  throw new IOException(
+  "Failed to remove remote WALs dir " + remoteWALDir + " for peer id=" 
+ peerId);
+}
+  }
+
   public void initPeerWorkers(String peerId) {
 BlockingQueue<ServerName> servers = new LinkedBlockingQueue<>();
 services.getServerManager().getOnlineServers().keySet()

http://git-wip-us.apache.org/repos/asf/hbase/blob/94712957/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SyncReplicationTestBase.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SyncReplicationTestBase.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SyncReplicationTestBase.java
index 0d5fce8..de679be 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SyncReplicationTestBase.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SyncReplicationTestBase.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.replication;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -71,6 +72,10 @@ public class SyncReplicationTestBase {
 
   protected static String PEER_ID = "1";
 
+  protected static Path remoteWALDir1;
+
+  

[15/29] hbase git commit: HBASE-19935 Only allow table replication for sync replication for now

2018-05-04 Thread zhangduo
HBASE-19935 Only allow table replication for sync replication for now


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a857a6bb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a857a6bb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a857a6bb

Branch: refs/heads/HBASE-19064
Commit: a857a6bba7c8d33dcdcdcf66c10f2f2810ce7dcb
Parents: 59b7230
Author: Guanghao Zhang 
Authored: Tue Feb 6 16:00:59 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../replication/ReplicationPeerConfig.java  |  9 +++
 .../replication/ReplicationPeerManager.java | 34 -
 .../replication/TestReplicationAdmin.java   | 73 ++--
 .../wal/TestCombinedAsyncWriter.java|  6 ++
 .../wal/TestSyncReplicationWALProvider.java |  6 ++
 5 files changed, 102 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a857a6bb/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
index 97abc74..997a155 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
@@ -25,6 +25,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
+
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -220,6 +222,13 @@ public class ReplicationPeerConfig {
 return this.remoteWALDir;
   }
 
+  /**
+   * Use remote wal dir to decide whether a peer is sync replication peer
+   */
+  public boolean isSyncReplication() {
+return !StringUtils.isBlank(this.remoteWALDir);
+  }
+
   public static ReplicationPeerConfigBuilder newBuilder() {
 return new ReplicationPeerConfigBuilderImpl();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a857a6bb/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
index f07a0d8..ff778a8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
@@ -170,7 +170,7 @@ public class ReplicationPeerManager {
   " does not match new remote wal dir '" + 
peerConfig.getRemoteWALDir() + "'");
 }
 
-if (oldPeerConfig.getRemoteWALDir() != null) {
+if (oldPeerConfig.isSyncReplication()) {
   if (!ReplicationUtils.isNamespacesAndTableCFsEqual(oldPeerConfig, 
peerConfig)) {
 throw new DoNotRetryIOException(
   "Changing the replicated namespace/table config on a synchronous 
replication " +
@@ -199,8 +199,8 @@ public class ReplicationPeerManager {
 }
 ReplicationPeerConfig copiedPeerConfig = 
ReplicationPeerConfig.newBuilder(peerConfig).build();
 SyncReplicationState syncReplicationState =
-StringUtils.isBlank(peerConfig.getRemoteWALDir()) ? 
SyncReplicationState.NONE
-: SyncReplicationState.DOWNGRADE_ACTIVE;
+copiedPeerConfig.isSyncReplication() ? 
SyncReplicationState.DOWNGRADE_ACTIVE
+: SyncReplicationState.NONE;
 peerStorage.addPeer(peerId, copiedPeerConfig, enabled, 
syncReplicationState);
 peers.put(peerId,
   new ReplicationPeerDescription(peerId, enabled, copiedPeerConfig, 
syncReplicationState));
@@ -324,9 +324,37 @@ public class ReplicationPeerManager {
 peerConfig.getTableCFsMap());
 }
 
+if (peerConfig.isSyncReplication()) {
+  checkPeerConfigForSyncReplication(peerConfig);
+}
+
 checkConfiguredWALEntryFilters(peerConfig);
   }
 
+  private void checkPeerConfigForSyncReplication(ReplicationPeerConfig 
peerConfig)
+  throws DoNotRetryIOException {
+// This is used to reduce the difficulty for implementing the sync 
replication state transition
+// as we need to reopen all the related regions.
+// TODO: Add namespace, replicat_all flag back
+if (peerConfig.replicateAllUserTables()) {
+  throw new DoNotRetryIOException(
+

[28/29] hbase git commit: HBASE-20434 Also remove remote wals when peer is in DA state

2018-05-04 Thread zhangduo
HBASE-20434 Also remove remote wals when peer is in DA state


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/007618cf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/007618cf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/007618cf

Branch: refs/heads/HBASE-19064
Commit: 007618cfbd27ec3e6cbba301d2d79f31583279fb
Parents: 75046ee
Author: zhangduo 
Authored: Wed Apr 25 17:12:23 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:54:52 2018 +0800

--
 .../hbase/replication/ReplicationUtils.java |   4 +
 ...ransitPeerSyncReplicationStateProcedure.java |   2 +-
 .../regionserver/ReplicationSource.java |   7 +-
 .../regionserver/ReplicationSourceManager.java  |  86 ++--
 .../hadoop/hbase/wal/AbstractFSWALProvider.java |  19 ++--
 .../hbase/wal/SyncReplicationWALProvider.java   |  30 +-
 .../TestSyncReplicationRemoveRemoteWAL.java | 101 +++
 .../TestReplicationSourceManager.java   |  68 -
 8 files changed, 251 insertions(+), 66 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/007618cf/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
index 66e9b01..069db7a 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
@@ -191,6 +191,10 @@ public final class ReplicationUtils {
 return new Path(remoteWALDir, peerId);
   }
 
+  public static Path getRemoteWALDirForPeer(Path remoteWALDir, String peerId) {
+return new Path(remoteWALDir, peerId);
+  }
+
   /**
* Do the sleeping logic
* @param msg Why we sleep

http://git-wip-us.apache.org/repos/asf/hbase/blob/007618cf/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
index 5da2b0c..99fd615 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
@@ -211,7 +211,7 @@ public class TransitPeerSyncReplicationStateProcedure
   case CREATE_DIR_FOR_REMOTE_WAL:
 MasterFileSystem mfs = env.getMasterFileSystem();
 Path remoteWALDir = new Path(mfs.getWALRootDir(), 
ReplicationUtils.REMOTE_WAL_DIR_NAME);
-Path remoteWALDirForPeer = new Path(remoteWALDir, peerId);
+Path remoteWALDirForPeer = 
ReplicationUtils.getRemoteWALDirForPeer(remoteWALDir, peerId);
 FileSystem walFs = mfs.getWALFileSystem();
 try {
   if (walFs.exists(remoteWALDirForPeer)) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/007618cf/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
index 1a27fc1..7313f13 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
@@ -549,14 +549,17 @@ public class ReplicationSource implements 
ReplicationSourceInterface {
 }
 
 /**
+ * 
  * Split a path to get the start time
+ * 
+ * 
  * For example: 10.20.20.171%3A60020.1277499063250
+ * 
  * @param p path to split
  * @return start time
  */
 private static long getTS(Path p) {
-  int tsIndex = p.getName().lastIndexOf('.') + 1;
-  return Long.parseLong(p.getName().substring(tsIndex));
+  return AbstractFSWALProvider.getWALStartTimeFromWALName(p.getName());
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/007618cf/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java

[22/29] hbase git commit: HBASE-19864 Use protobuf instead of enum.ordinal to store SyncReplicationState

2018-05-04 Thread zhangduo
HBASE-19864 Use protobuf instead of enum.ordinal to store SyncReplicationState

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/59b7230d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/59b7230d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/59b7230d

Branch: refs/heads/HBASE-19064
Commit: 59b7230d123d55e9c367450599b351a6427451a4
Parents: 50d2b51
Author: Guanghao Zhang 
Authored: Fri Jan 26 16:50:48 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../replication/ReplicationPeerConfigUtil.java  | 22 +++---
 .../hbase/replication/SyncReplicationState.java | 17 ++
 .../hbase/shaded/protobuf/RequestConverter.java |  7 +++---
 .../src/main/protobuf/Replication.proto | 13 +++
 .../replication/ZKReplicationPeerStorage.java   | 24 +---
 .../hadoop/hbase/master/MasterRpcServices.java  |  9 
 ...ransitPeerSyncReplicationStateProcedure.java |  9 
 .../TestReplicationSourceManager.java   |  2 +-
 8 files changed, 67 insertions(+), 36 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/59b7230d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
index 6cbe05b..331795c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
@@ -403,7 +403,7 @@ public final class ReplicationPeerConfigUtil {
 ReplicationProtos.ReplicationState.State.ENABLED == 
desc.getState().getState();
 ReplicationPeerConfig config = convert(desc.getConfig());
 return new ReplicationPeerDescription(desc.getId(), enabled, config,
-
SyncReplicationState.valueOf(desc.getSyncReplicationState().getNumber()));
+  toSyncReplicationState(desc.getSyncReplicationState()));
   }
 
   public static ReplicationProtos.ReplicationPeerDescription
@@ -411,17 +411,33 @@ public final class ReplicationPeerConfigUtil {
 ReplicationProtos.ReplicationPeerDescription.Builder builder =
 ReplicationProtos.ReplicationPeerDescription.newBuilder();
 builder.setId(desc.getPeerId());
+
 ReplicationProtos.ReplicationState.Builder stateBuilder =
 ReplicationProtos.ReplicationState.newBuilder();
 stateBuilder.setState(desc.isEnabled() ? 
ReplicationProtos.ReplicationState.State.ENABLED :
 ReplicationProtos.ReplicationState.State.DISABLED);
 builder.setState(stateBuilder.build());
+
 builder.setConfig(convert(desc.getPeerConfig()));
-builder.setSyncReplicationState(
-  
ReplicationProtos.SyncReplicationState.forNumber(desc.getSyncReplicationState().ordinal()));
+
builder.setSyncReplicationState(toSyncReplicationState(desc.getSyncReplicationState()));
+
 return builder.build();
   }
 
+  public static ReplicationProtos.SyncReplicationState
+  toSyncReplicationState(SyncReplicationState state) {
+ReplicationProtos.SyncReplicationState.Builder syncReplicationStateBuilder 
=
+ReplicationProtos.SyncReplicationState.newBuilder();
+syncReplicationStateBuilder
+
.setState(ReplicationProtos.SyncReplicationState.State.forNumber(state.ordinal()));
+return syncReplicationStateBuilder.build();
+  }
+
+  public static SyncReplicationState
+  toSyncReplicationState(ReplicationProtos.SyncReplicationState state) {
+return SyncReplicationState.valueOf(state.getState().getNumber());
+  }
+
   public static ReplicationPeerConfig appendTableCFsToReplicationPeerConfig(
   Map tableCfs, ReplicationPeerConfig peerConfig) 
{
 ReplicationPeerConfigBuilder builder = 
ReplicationPeerConfig.newBuilder(peerConfig);

http://git-wip-us.apache.org/repos/asf/hbase/blob/59b7230d/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/SyncReplicationState.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/SyncReplicationState.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/SyncReplicationState.java
index bd144e9..a65b144 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/SyncReplicationState.java
+++ 

[27/29] hbase git commit: HBASE-20426 Give up replicating anything in S state

2018-05-04 Thread zhangduo
HBASE-20426 Give up replicating anything in S state


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6b0c20ff
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6b0c20ff
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6b0c20ff

Branch: refs/heads/HBASE-19064
Commit: 6b0c20ffdbe83604212e40d946fd9fb6c7f41300
Parents: 9471295
Author: zhangduo 
Authored: Thu May 3 15:51:35 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:54:52 2018 +0800

--
 .../src/main/protobuf/MasterProcedure.proto |  13 +-
 .../replication/AbstractPeerProcedure.java  |   4 +
 .../master/replication/ModifyPeerProcedure.java |   6 -
 .../replication/ReplicationPeerManager.java |  13 +-
 ...ransitPeerSyncReplicationStateProcedure.java |  94 +++
 .../hadoop/hbase/regionserver/LogRoller.java|  11 +-
 .../regionserver/PeerProcedureHandlerImpl.java  |  63 --
 .../regionserver/ReplicationSource.java |   1 +
 .../regionserver/ReplicationSourceManager.java  | 118 ---
 .../TestDrainReplicationQueuesForStandBy.java   | 118 +++
 10 files changed, 379 insertions(+), 62 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6b0c20ff/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto 
b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
index 01e4dae..f15cb04 100644
--- a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
@@ -394,11 +394,14 @@ enum PeerSyncReplicationStateTransitionState {
   SET_PEER_NEW_SYNC_REPLICATION_STATE = 2;
   REFRESH_PEER_SYNC_REPLICATION_STATE_ON_RS_BEGIN = 3;
   REPLAY_REMOTE_WAL_IN_PEER = 4;
-  REOPEN_ALL_REGIONS_IN_PEER = 5;
-  TRANSIT_PEER_NEW_SYNC_REPLICATION_STATE = 6;
-  REFRESH_PEER_SYNC_REPLICATION_STATE_ON_RS_END = 7;
-  CREATE_DIR_FOR_REMOTE_WAL = 8;
-  POST_PEER_SYNC_REPLICATION_STATE_TRANSITION = 9;
+  REMOVE_ALL_REPLICATION_QUEUES_IN_PEER = 5;
+  REOPEN_ALL_REGIONS_IN_PEER = 6;
+  TRANSIT_PEER_NEW_SYNC_REPLICATION_STATE = 7;
+  REFRESH_PEER_SYNC_REPLICATION_STATE_ON_RS_END = 8;
+  SYNC_REPLICATION_SET_PEER_ENABLED = 9;
+  SYNC_REPLICATION_ENABLE_PEER_REFRESH_PEER_ON_RS = 10;
+  CREATE_DIR_FOR_REMOTE_WAL = 11;
+  POST_PEER_SYNC_REPLICATION_STATE_TRANSITION = 12;
 }
 
 message PeerModificationStateData {

http://git-wip-us.apache.org/repos/asf/hbase/blob/6b0c20ff/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AbstractPeerProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AbstractPeerProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AbstractPeerProcedure.java
index 6679d78..458e073 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AbstractPeerProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AbstractPeerProcedure.java
@@ -106,4 +106,8 @@ public abstract class AbstractPeerProcedure
 throw new UnsupportedOperationException();
   }
 
+  protected final void refreshPeer(MasterProcedureEnv env, PeerOperationType 
type) {
+
addChildProcedure(env.getMasterServices().getServerManager().getOnlineServersList().stream()
+  .map(sn -> new RefreshPeerProcedure(peerId, type, 
sn)).toArray(RefreshPeerProcedure[]::new));
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/6b0c20ff/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ModifyPeerProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ModifyPeerProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ModifyPeerProcedure.java
index 32b8ea1..56462ca 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ModifyPeerProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ModifyPeerProcedure.java
@@ -108,12 +108,6 @@ public abstract class ModifyPeerProcedure extends 
AbstractPeerProcedure new RefreshPeerProcedure(peerId, type, sn))
-  .toArray(RefreshPeerProcedure[]::new));
-  }
-
   protected ReplicationPeerConfig getOldPeerConfig() {
 return null;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/6b0c20ff/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java

[06/29] hbase git commit: HBASE-19082 Reject read/write from client but accept write from replication in state S

2018-05-04 Thread zhangduo
HBASE-19082 Reject read/write from client but accept write from replication in 
state S


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/22e1b9f1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/22e1b9f1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/22e1b9f1

Branch: refs/heads/HBASE-19064
Commit: 22e1b9f1aff861e995029507d7b68fe99c771ce8
Parents: 0b2ece7
Author: zhangduo 
Authored: Mon Feb 12 18:20:18 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../org/apache/hadoop/hbase/HConstants.java |   3 -
 .../src/main/protobuf/MasterProcedure.proto |   3 +-
 .../hbase/replication/ReplicationUtils.java |   4 +
 ...ransitPeerSyncReplicationStateProcedure.java |  10 +
 .../hadoop/hbase/regionserver/HRegion.java  |   5 +-
 .../hbase/regionserver/HRegionServer.java   |   2 +-
 .../hbase/regionserver/RSRpcServices.java   |  88 ++--
 .../RejectRequestsFromClientStateChecker.java   |  44 
 .../regionserver/ReplicationSink.java   |  72 ---
 .../SyncReplicationPeerInfoProvider.java|  10 +-
 .../SyncReplicationPeerInfoProviderImpl.java|  19 +-
 .../hbase/wal/SyncReplicationWALProvider.java   |   3 +
 .../org/apache/hadoop/hbase/wal/WALFactory.java |   4 +-
 .../hbase/replication/TestSyncReplication.java  | 200 +++
 .../wal/TestSyncReplicationWALProvider.java |   8 +-
 15 files changed, 401 insertions(+), 74 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/22e1b9f1/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 522c2cf..9241682 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -1355,9 +1355,6 @@ public final class HConstants {
 
   public static final String NOT_IMPLEMENTED = "Not implemented";
 
-  // TODO: need to find a better place to hold it.
-  public static final String SYNC_REPLICATION_ENABLED = 
"hbase.replication.sync.enabled";
-
   private HConstants() {
 // Can't be instantiated with this ctor.
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/22e1b9f1/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto 
b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
index 67c1b43..e8b940e 100644
--- a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
@@ -397,7 +397,8 @@ enum PeerSyncReplicationStateTransitionState {
   REOPEN_ALL_REGIONS_IN_PEER = 5;
   TRANSIT_PEER_NEW_SYNC_REPLICATION_STATE = 6;
   REFRESH_PEER_SYNC_REPLICATION_STATE_ON_RS_END = 7;
-  POST_PEER_SYNC_REPLICATION_STATE_TRANSITION = 8;
+  CREATE_DIR_FOR_REMOTE_WAL = 8;
+  POST_PEER_SYNC_REPLICATION_STATE_TRANSITION = 9;
 }
 
 message PeerModificationStateData {

http://git-wip-us.apache.org/repos/asf/hbase/blob/22e1b9f1/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
index e4dea83..d94cb00 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
@@ -37,6 +37,10 @@ import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Private
 public final class ReplicationUtils {
 
+  public static final String SYNC_REPLICATION_ENABLED = 
"hbase.replication.sync.enabled";
+
+  public static final String REPLICATION_ATTR_NAME = "__rep__";
+
   private ReplicationUtils() {
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/22e1b9f1/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/TransitPeerSyncReplicationStateProcedure.java
index 8fc932f..69404a0 100644
--- 

[04/29] hbase git commit: HBASE-20378 Provide a hbck option to cleanup replication barrier for a table

2018-05-04 Thread zhangduo
HBASE-20378 Provide a hbck option to cleanup replication barrier for a table

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/87f5b5f3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/87f5b5f3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/87f5b5f3

Branch: refs/heads/HBASE-19064
Commit: 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f
Parents: 6225b4a
Author: jingyuntian 
Authored: Sat Apr 28 11:34:29 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 15:27:33 2018 +0800

--
 .../org/apache/hadoop/hbase/util/HBaseFsck.java | 131 ++--
 .../TestHBaseFsckCleanReplicationBarriers.java  | 205 +++
 .../hadoop/hbase/util/hbck/HbckTestingUtil.java |  20 +-
 3 files changed, 336 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/87f5b5f3/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 9fcf320..6d9ca9a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -58,6 +58,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.lang3.StringUtils;
@@ -85,6 +86,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ClusterConnection;
@@ -99,7 +101,9 @@ import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RowMutations;
+import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
@@ -115,6 +119,10 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.replication.ReplicationException;
+import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
+import org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
+import org.apache.hadoop.hbase.replication.ReplicationStorageFactory;
+import org.apache.hadoop.hbase.replication.ReplicationUtils;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
@@ -268,11 +276,13 @@ public class HBaseFsck extends Configured implements 
Closeable {
   private boolean fixHFileLinks = false; // fix lingering HFileLinks
   private boolean fixEmptyMetaCells = false; // fix (remove) empty 
REGIONINFO_QUALIFIER rows
   private boolean fixReplication = false; // fix undeleted replication queues 
for removed peer
+  private boolean cleanReplicationBarrier = false; // clean replication 
barriers of a table
   private boolean fixAny = false; // Set to true if any of the fix is required.
 
   // limit checking/fixes to listed tables, if empty attempt to check/fix all
   // hbase:meta are always checked
   private Set tablesIncluded = new HashSet<>();
+  private TableName cleanReplicationBarrierTable;
   private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping 
regions to merge
   // maximum number of overlapping regions to sideline
   private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;
@@ -786,6 +796,8 @@ public class HBaseFsck extends Configured implements 
Closeable {
 
 checkAndFixReplication();
 
+cleanReplicationBarrier();
+
 // Remove the hbck znode
 cleanupHbckZnode();
 
@@ -4118,14 +4130,13 @@ public class HBaseFsck extends Configured implements 
Closeable {
 enum ERROR_CODE {
   UNKNOWN, NO_META_REGION, 

[12/29] hbase git commit: HBASE-20425 Do not write the cluster id of the current active cluster when writing remote WAL

2018-05-04 Thread zhangduo
HBASE-20425 Do not write the cluster id of the current active cluster when 
writing remote WAL


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b5644868
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b5644868
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b5644868

Branch: refs/heads/HBASE-19064
Commit: b5644868f949f0d03406ddd287b391bebafa15c8
Parents: 315e487
Author: huzheng 
Authored: Mon Apr 23 17:20:55 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../replication/TestSyncReplicationActive.java  | 32 
 1 file changed, 32 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b5644868/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSyncReplicationActive.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSyncReplicationActive.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSyncReplicationActive.java
index bff4572..f9020a0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSyncReplicationActive.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSyncReplicationActive.java
@@ -17,9 +17,17 @@
  */
 package org.apache.hadoop.hbase.replication;
 
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.ReplicationTests;
+import org.apache.hadoop.hbase.wal.WAL.Entry;
+import org.apache.hadoop.hbase.wal.WAL.Reader;
+import org.apache.hadoop.hbase.wal.WALFactory;
+import org.junit.Assert;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -49,6 +57,9 @@ public class TestSyncReplicationActive extends 
SyncReplicationTestBase {
 // peer is disabled so no data have been replicated
 verifyNotReplicatedThroughRegion(UTIL2, 0, 100);
 
+// Ensure that there's no cluster id in remote log entries.
+verifyNoClusterIdInRemoteLog(UTIL2, PEER_ID);
+
 UTIL2.getAdmin().transitReplicationPeerSyncReplicationState(PEER_ID,
   SyncReplicationState.DOWNGRADE_ACTIVE);
 // confirm that peer with state DA will reject replication request.
@@ -72,4 +83,25 @@ public class TestSyncReplicationActive extends 
SyncReplicationTestBase {
 verifyReplicationRequestRejection(UTIL2, true);
 write(UTIL2, 200, 300);
   }
+
+  private void verifyNoClusterIdInRemoteLog(HBaseTestingUtility utility, 
String peerId)
+  throws Exception {
+FileSystem fs2 = utility.getTestFileSystem();
+Path remoteDir =
+new 
Path(utility.getMiniHBaseCluster().getMaster().getMasterFileSystem().getRootDir(),
+"remoteWALs").makeQualified(fs2.getUri(), 
fs2.getWorkingDirectory());
+FileStatus[] files = fs2.listStatus(new Path(remoteDir, peerId));
+Assert.assertTrue(files.length > 0);
+for (FileStatus file : files) {
+  try (Reader reader =
+  WALFactory.createReader(fs2, file.getPath(), 
utility.getConfiguration())) {
+Entry entry = reader.next();
+Assert.assertTrue(entry != null);
+while (entry != null) {
+  Assert.assertEquals(entry.getKey().getClusterIds().size(), 0);
+  entry = reader.next();
+}
+  }
+}
+  }
 }



[09/29] hbase git commit: HBASE-19078 Add a remote peer cluster wal directory config for synchronous replication

2018-05-04 Thread zhangduo
HBASE-19078 Add a remote peer cluster wal directory config for synchronous 
replication

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/47694cf8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/47694cf8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/47694cf8

Branch: refs/heads/HBASE-19064
Commit: 47694cf8e1220afab1c69d2e63922743d1fecde2
Parents: 0893bb5
Author: Guanghao Zhang 
Authored: Sat Jan 13 18:55:28 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../replication/ReplicationPeerConfigUtil.java  |  6 ++
 .../replication/ReplicationPeerConfig.java  | 20 -
 .../ReplicationPeerConfigBuilder.java   |  7 ++
 .../src/main/protobuf/Replication.proto |  1 +
 .../replication/ReplicationPeerManager.java | 15 
 .../replication/TestReplicationAdmin.java   | 77 
 .../src/main/ruby/hbase/replication_admin.rb| 14 ++--
 hbase-shell/src/main/ruby/hbase_constants.rb|  1 +
 .../src/main/ruby/shell/commands/add_peer.rb| 21 +-
 .../src/main/ruby/shell/commands/list_peers.rb  | 19 -
 .../test/ruby/hbase/replication_admin_test.rb   | 16 
 11 files changed, 186 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/47694cf8/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
index b1c1713..474ded3 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
@@ -319,6 +319,9 @@ public final class ReplicationPeerConfigUtil {
 
excludeNamespacesList.stream().map(ByteString::toStringUtf8).collect(Collectors.toSet()));
 }
 
+if (peer.hasRemoteWALDir()) {
+  builder.setRemoteWALDir(peer.getRemoteWALDir());
+}
 return builder.build();
   }
 
@@ -376,6 +379,9 @@ public final class ReplicationPeerConfigUtil {
   }
 }
 
+if (peerConfig.getRemoteWALDir() != null) {
+  builder.setRemoteWALDir(peerConfig.getRemoteWALDir());
+}
 return builder.build();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/47694cf8/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
index e0d9a4c..97abc74 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
@@ -47,6 +47,8 @@ public class ReplicationPeerConfig {
   private Set excludeNamespaces = null;
   private long bandwidth = 0;
   private final boolean serial;
+  // Used by synchronous replication
+  private String remoteWALDir;
 
   private ReplicationPeerConfig(ReplicationPeerConfigBuilderImpl builder) {
 this.clusterKey = builder.clusterKey;
@@ -66,6 +68,7 @@ public class ReplicationPeerConfig {
 : null;
 this.bandwidth = builder.bandwidth;
 this.serial = builder.serial;
+this.remoteWALDir = builder.remoteWALDir;
   }
 
   private Map
@@ -213,6 +216,10 @@ public class ReplicationPeerConfig {
 return this;
   }
 
+  public String getRemoteWALDir() {
+return this.remoteWALDir;
+  }
+
   public static ReplicationPeerConfigBuilder newBuilder() {
 return new ReplicationPeerConfigBuilderImpl();
   }
@@ -230,7 +237,8 @@ public class ReplicationPeerConfig {
   .setReplicateAllUserTables(peerConfig.replicateAllUserTables())
   .setExcludeTableCFsMap(peerConfig.getExcludeTableCFsMap())
   .setExcludeNamespaces(peerConfig.getExcludeNamespaces())
-  
.setBandwidth(peerConfig.getBandwidth()).setSerial(peerConfig.isSerial());
+  .setBandwidth(peerConfig.getBandwidth()).setSerial(peerConfig.isSerial())
+  .setRemoteWALDir(peerConfig.getRemoteWALDir());
 return builder;
   }
 
@@ -259,6 +267,8 @@ public class ReplicationPeerConfig {
 
 private boolean serial = false;
 
+private String remoteWALDir = null;
+
 @Override
 public ReplicationPeerConfigBuilder setClusterKey(String 

[13/29] hbase git commit: HBASE-19857 Complete the procedure for adding a sync replication peer

2018-05-04 Thread zhangduo
HBASE-19857 Complete the procedure for adding a sync replication peer


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/50d2b516
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/50d2b516
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/50d2b516

Branch: refs/heads/HBASE-19064
Commit: 50d2b516d64030d7c9f51ab2c2324084e7ffc494
Parents: 502a954
Author: zhangduo 
Authored: Thu Jan 25 20:09:00 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../hbase/replication/ReplicationPeer.java  |   9 +
 .../hbase/replication/ReplicationPeerImpl.java  |  28 +--
 .../hbase/replication/ReplicationPeers.java |   3 +-
 .../regionserver/PeerActionListener.java|  10 +-
 .../SyncReplicationPeerProvider.java|  35 +++
 .../SynchronousReplicationPeerProvider.java |  35 ---
 .../hbase/wal/SyncReplicationWALProvider.java   | 234 +++
 .../wal/SynchronousReplicationWALProvider.java  | 225 --
 .../org/apache/hadoop/hbase/wal/WALFactory.java |   8 +-
 .../TestReplicationSourceManager.java   |   3 +
 .../wal/TestSyncReplicationWALProvider.java | 153 
 .../TestSynchronousReplicationWALProvider.java  | 153 
 12 files changed, 456 insertions(+), 440 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/50d2b516/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
index 2da3cce..0196a9a 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
@@ -54,6 +54,15 @@ public interface ReplicationPeer {
   PeerState getPeerState();
 
   /**
+   * Returns the sync replication state of the peer by reading local cache.
+   * 
+   * If the peer is not a synchronous replication peer, a {@link 
SyncReplicationState#NONE} will be
+   * returned.
+   * @return the sync replication state
+   */
+  SyncReplicationState getSyncReplicationState();
+
+  /**
* Test whether the peer is enabled.
* @return {@code true} if enabled, otherwise {@code false}.
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/50d2b516/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerImpl.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerImpl.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerImpl.java
index d656466..ff3f662 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerImpl.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerImpl.java
@@ -36,6 +36,8 @@ public class ReplicationPeerImpl implements ReplicationPeer {
 
   private volatile PeerState peerState;
 
+  private volatile SyncReplicationState syncReplicationState;
+
   private final List peerConfigListeners;
 
   /**
@@ -45,12 +47,13 @@ public class ReplicationPeerImpl implements ReplicationPeer 
{
* @param id string representation of this peer's identifier
* @param peerConfig configuration for the replication peer
*/
-  public ReplicationPeerImpl(Configuration conf, String id, boolean peerState,
-  ReplicationPeerConfig peerConfig) {
+  public ReplicationPeerImpl(Configuration conf, String id, 
ReplicationPeerConfig peerConfig,
+  boolean peerState, SyncReplicationState syncReplicationState) {
 this.conf = conf;
 this.id = id;
 this.peerState = peerState ? PeerState.ENABLED : PeerState.DISABLED;
 this.peerConfig = peerConfig;
+this.syncReplicationState = syncReplicationState;
 this.peerConfigListeners = new ArrayList<>();
   }
 
@@ -77,37 +80,26 @@ public class ReplicationPeerImpl implements ReplicationPeer 
{
 return peerState;
   }
 
-  /**
-   * Get the peer config object
-   * @return the ReplicationPeerConfig for this peer
-   */
+  @Override
+  public SyncReplicationState getSyncReplicationState() {
+return syncReplicationState;
+  }
+
   @Override
   public ReplicationPeerConfig getPeerConfig() {
 return peerConfig;
   }
 
-  /**
-   * Get the configuration object required to communicate with this peer
-   * @return configuration object
-   */
   @Override
   public Configuration getConfiguration() {
 return conf;
   }
 
-  /**
-  

[20/29] hbase git commit: HBASE-19781 Add a new cluster state flag for synchronous replication

2018-05-04 Thread zhangduo
HBASE-19781 Add a new cluster state flag for synchronous replication


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/502a9543
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/502a9543
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/502a9543

Branch: refs/heads/HBASE-19064
Commit: 502a9543b344eae7cb327dc1aeb593b5d6c4f2d0
Parents: 70212ce
Author: Guanghao Zhang 
Authored: Mon Jan 22 11:44:49 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../org/apache/hadoop/hbase/client/Admin.java   |  39 +
 .../apache/hadoop/hbase/client/AsyncAdmin.java  |  31 
 .../hadoop/hbase/client/AsyncHBaseAdmin.java|   7 +
 .../hbase/client/ConnectionImplementation.java  |   9 ++
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  26 +++
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java |  15 ++
 .../client/ShortCircuitMasterConnection.java|   9 ++
 .../replication/ReplicationPeerConfigUtil.java  |  26 +--
 .../replication/ReplicationPeerDescription.java |  10 +-
 .../hbase/replication/SyncReplicationState.java |  48 ++
 .../hbase/shaded/protobuf/RequestConverter.java |  10 ++
 .../src/main/protobuf/Master.proto  |   4 +
 .../src/main/protobuf/MasterProcedure.proto |   4 +
 .../src/main/protobuf/Replication.proto |  20 +++
 .../replication/ReplicationPeerStorage.java |  18 ++-
 .../hbase/replication/ReplicationUtils.java |   1 +
 .../replication/ZKReplicationPeerStorage.java   |  61 +--
 .../replication/TestReplicationStateBasic.java  |  23 ++-
 .../TestZKReplicationPeerStorage.java   |  12 +-
 .../hbase/coprocessor/MasterObserver.java   |  23 +++
 .../org/apache/hadoop/hbase/master/HMaster.java |  12 ++
 .../hbase/master/MasterCoprocessorHost.java |  21 +++
 .../hadoop/hbase/master/MasterRpcServices.java  |  17 ++
 .../hadoop/hbase/master/MasterServices.java |   9 ++
 .../procedure/PeerProcedureInterface.java   |   2 +-
 .../replication/ReplicationPeerManager.java |  51 +-
 ...ransitPeerSyncReplicationStateProcedure.java | 159 +++
 .../hbase/security/access/AccessController.java |   8 +
 .../replication/TestReplicationAdmin.java   |  62 
 .../hbase/master/MockNoopMasterServices.java|   8 +-
 .../cleaner/TestReplicationHFileCleaner.java|   4 +-
 .../TestReplicationTrackerZKImpl.java   |   6 +-
 .../TestReplicationSourceManager.java   |   3 +-
 .../security/access/TestAccessController.java   |  16 ++
 .../hbase/util/TestHBaseFsckReplication.java|   5 +-
 .../src/main/ruby/hbase/replication_admin.rb|  15 ++
 hbase-shell/src/main/ruby/shell.rb  |   1 +
 .../src/main/ruby/shell/commands/list_peers.rb  |   6 +-
 .../transit_peer_sync_replication_state.rb  |  44 +
 .../test/ruby/hbase/replication_admin_test.rb   |  24 +++
 40 files changed, 816 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/502a9543/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 331f2d1..39542e4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -51,6 +51,7 @@ import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
 import org.apache.hadoop.hbase.replication.ReplicationException;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
+import org.apache.hadoop.hbase.replication.SyncReplicationState;
 import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
 import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
@@ -2657,6 +2658,44 @@ public interface Admin extends Abortable, Closeable {
  List<ReplicationPeerDescription> listReplicationPeers(Pattern pattern) 
throws IOException;
 
   /**
+   * Transit current cluster to a new state in a synchronous replication peer.
+   * @param peerId a short name that identifies the peer
+   * @param state a new state of current cluster
+   * @throws IOException if a remote or network exception occurs
+   */
+  void transitReplicationPeerSyncReplicationState(String peerId, 
SyncReplicationState state)
+  throws IOException;
+
+  /**
+   * Transit current cluster to a new state in a synchronous replication peer. 
But does not block
+   * and wait for it.
+   * 
+   * You can use Future.get(long, TimeUnit) to wait on the 

[02/29] hbase git commit: HBASE-20524 Need to clear metrics when ReplicationSourceManager refresh replication sources

2018-05-04 Thread zhangduo
HBASE-20524 Need to clear metrics when ReplicationSourceManager refresh 
replication sources


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9b9f8514
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9b9f8514
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9b9f8514

Branch: refs/heads/HBASE-19064
Commit: 9b9f851470b74ea95766d2828c37e75675392a1b
Parents: 09ca619
Author: Guanghao Zhang 
Authored: Thu May 3 16:45:06 2018 +0800
Committer: Guanghao Zhang 
Committed: Fri May 4 14:31:41 2018 +0800

--
 .../replication/regionserver/RecoveredReplicationSource.java | 1 +
 .../hadoop/hbase/replication/regionserver/ReplicationSource.java | 1 +
 .../hbase/replication/regionserver/ReplicationSourceManager.java | 2 --
 .../apache/hadoop/hbase/replication/ReplicationSourceDummy.java  | 4 ++--
 4 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9b9f8514/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
index f1ad99d..a21ca44 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
@@ -149,6 +149,7 @@ public class RecoveredReplicationSource extends 
ReplicationSource {
   Threads.sleep(100);// wait a short while for other worker thread to 
fully exit
   boolean allTasksDone = workerThreads.values().stream().allMatch(w -> 
w.isFinished());
   if (allTasksDone) {
+this.getSourceMetrics().clear();
 manager.removeRecoveredSource(this);
 LOG.info("Finished recovering queue {} with the following stats: {}", 
queueId, getStats());
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/9b9f8514/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
index 236c575..b05a673 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
@@ -519,6 +519,7 @@ public class ReplicationSource implements 
ReplicationSourceInterface {
 }
   }
 }
+this.metrics.clear();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/9b9f8514/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index 70cd986..9b4a22c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -418,7 +418,6 @@ public class ReplicationSourceManager implements 
ReplicationListener {
*/
   void removeRecoveredSource(ReplicationSourceInterface src) {
 LOG.info("Done with the recovered queue " + src.getQueueId());
-src.getSourceMetrics().clear();
 this.oldsources.remove(src);
 // Delete queue from storage and memory
 deleteQueue(src.getQueueId());
@@ -431,7 +430,6 @@ public class ReplicationSourceManager implements 
ReplicationListener {
*/
   void removeSource(ReplicationSourceInterface src) {
 LOG.info("Done with the queue " + src.getQueueId());
-src.getSourceMetrics().clear();
 this.sources.remove(src.getPeerId());
 // Delete queue from storage and memory
 deleteQueue(src.getQueueId());

http://git-wip-us.apache.org/repos/asf/hbase/blob/9b9f8514/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/ReplicationSourceDummy.java
--
diff --git 

[18/29] hbase git commit: HBASE-19999 Remove the SYNC_REPLICATION_ENABLED flag

2018-05-04 Thread zhangduo
HBASE-19999 Remove the SYNC_REPLICATION_ENABLED flag


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9f3b31b6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9f3b31b6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9f3b31b6

Branch: refs/heads/HBASE-19064
Commit: 9f3b31b6fb93d06268c22457f8ed8916076b812b
Parents: 91d4eff
Author: Guanghao Zhang 
Authored: Fri Mar 9 11:30:25 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../hbase/replication/ReplicationUtils.java  |  2 --
 .../hadoop/hbase/regionserver/HRegionServer.java | 13 -
 .../hbase/wal/SyncReplicationWALProvider.java| 19 ++-
 .../org/apache/hadoop/hbase/wal/WALFactory.java  | 18 --
 .../hbase/replication/TestSyncReplication.java   |  1 -
 .../master/TestRecoverStandbyProcedure.java  |  2 --
 .../wal/TestSyncReplicationWALProvider.java  |  2 --
 7 files changed, 38 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9f3b31b6/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
index e402d0f..cb22f57 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationUtils.java
@@ -37,8 +37,6 @@ import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Private
 public final class ReplicationUtils {
 
-  public static final String SYNC_REPLICATION_ENABLED = 
"hbase.replication.sync.enabled";
-
   public static final String REPLICATION_ATTR_NAME = "__rep__";
 
   public static final String REMOTE_WAL_DIR_NAME = "remoteWALs";

http://git-wip-us.apache.org/repos/asf/hbase/blob/9f3b31b6/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index f8e2105..25bc6be 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -1804,10 +1804,8 @@ public class HRegionServer extends HasThread implements
   private void setupWALAndReplication() throws IOException {
 boolean isMasterNoTableOrSystemTableOnly = this instanceof HMaster &&
   (!LoadBalancer.isTablesOnMaster(conf) || 
LoadBalancer.isSystemTablesOnlyOnMaster(conf));
-if (isMasterNoTableOrSystemTableOnly) {
-  conf.setBoolean(ReplicationUtils.SYNC_REPLICATION_ENABLED, false);
-}
-WALFactory factory = new WALFactory(conf, serverName.toString());
+WALFactory factory =
+new WALFactory(conf, serverName.toString(), 
!isMasterNoTableOrSystemTableOnly);
 if (!isMasterNoTableOrSystemTableOnly) {
   // TODO Replication make assumptions here based on the default 
filesystem impl
   Path oldLogDir = new Path(walRootDir, HConstants.HREGION_OLDLOGDIR_NAME);
@@ -1926,11 +1924,8 @@ public class HRegionServer extends HasThread implements
 }
 this.executorService.startExecutorService(ExecutorType.RS_REFRESH_PEER,
   conf.getInt("hbase.regionserver.executor.refresh.peer.threads", 2));
-
-if (conf.getBoolean(ReplicationUtils.SYNC_REPLICATION_ENABLED, false)) {
-  
this.executorService.startExecutorService(ExecutorType.RS_REPLAY_SYNC_REPLICATION_WAL,
-
conf.getInt("hbase.regionserver.executor.replay.sync.replication.wal.threads", 
2));
-}
+
this.executorService.startExecutorService(ExecutorType.RS_REPLAY_SYNC_REPLICATION_WAL,
+  
conf.getInt("hbase.regionserver.executor.replay.sync.replication.wal.threads", 
1));
 
 Threads.setDaemonThreadRunning(this.walRoller.getThread(), getName() + 
".logRoller",
 uncaughtExceptionHandler);

http://git-wip-us.apache.org/repos/asf/hbase/blob/9f3b31b6/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.java
index 282aa21..54287fe 100644
--- 

[11/29] hbase git commit: HBASE-19943 Only allow removing sync replication peer which is in DA state

2018-05-04 Thread zhangduo
HBASE-19943 Only allow removing sync replication peer which is in DA state


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b5568d3f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b5568d3f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b5568d3f

Branch: refs/heads/HBASE-19064
Commit: b5568d3f4649b1e8184e7d6b4d93fd3aeb3003b4
Parents: ff1e79a
Author: huzheng 
Authored: Thu Mar 1 18:34:02 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 17:46:18 2018 +0800

--
 .../replication/ReplicationPeerManager.java | 14 -
 .../hbase/wal/SyncReplicationWALProvider.java   |  2 +-
 .../replication/TestReplicationAdmin.java   | 63 
 .../hbase/replication/TestSyncReplication.java  |  2 +-
 4 files changed, 78 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b5568d3f/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
index 0dc922d..41dd6e3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
@@ -120,8 +120,20 @@ public class ReplicationPeerManager {
 return desc;
   }
 
+  private void checkPeerInDAStateIfSyncReplication(String peerId) throws 
DoNotRetryIOException {
+ReplicationPeerDescription desc = peers.get(peerId);
+if (desc != null && desc.getPeerConfig().isSyncReplication()
+&& 
!SyncReplicationState.DOWNGRADE_ACTIVE.equals(desc.getSyncReplicationState())) {
+  throw new DoNotRetryIOException("Couldn't remove synchronous replication 
peer with state="
+  + desc.getSyncReplicationState()
+  + ", Transit the synchronous replication state to be 
DOWNGRADE_ACTIVE firstly.");
+}
+  }
+
   ReplicationPeerConfig preRemovePeer(String peerId) throws 
DoNotRetryIOException {
-return checkPeerExists(peerId).getPeerConfig();
+ReplicationPeerDescription pd = checkPeerExists(peerId);
+checkPeerInDAStateIfSyncReplication(peerId);
+return pd.getPeerConfig();
   }
 
   void preEnablePeer(String peerId) throws DoNotRetryIOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5568d3f/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.java
index ac4b4cd..282aa21 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/SyncReplicationWALProvider.java
@@ -142,7 +142,7 @@ public class SyncReplicationWALProvider implements 
WALProvider, PeerActionListen
   @Override
   public WAL getWAL(RegionInfo region) throws IOException {
 if (region == null) {
-  return provider.getWAL(region);
+  return provider.getWAL(null);
 }
 Optional> peerIdAndRemoteWALDir =
   peerInfoProvider.getPeerIdAndRemoteWALDir(region);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5568d3f/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java
index 0ad476f..486ab51 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java
@@ -254,6 +254,62 @@ public class TestReplicationAdmin {
   }
 
   @Test
+  public void testRemovePeerWithNonDAState() throws Exception {
+TableName tableName = TableName.valueOf(name.getMethodName());
+TEST_UTIL.createTable(tableName, Bytes.toBytes("family"));
+ReplicationPeerConfigBuilder builder = ReplicationPeerConfig.newBuilder();
+
+String rootDir = "hdfs://srv1:/hbase";
+builder.setClusterKey(KEY_ONE);
+builder.setRemoteWALDir(rootDir);
+builder.setReplicateAllUserTables(false);
+Map tableCfs = new 

hbase git commit: HBASE-20481 Replicate entries from same region serially in ReplicationEndpoint for serial replication

2018-05-04 Thread openinx
Repository: hbase
Updated Branches:
  refs/heads/branch-2 5f260451d -> de71cb591


HBASE-20481 Replicate entries from same region serially in ReplicationEndpoint 
for serial replication


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/de71cb59
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/de71cb59
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/de71cb59

Branch: refs/heads/branch-2
Commit: de71cb591573508f4f5e96e0efcfc0171c1f8a67
Parents: 5f26045
Author: huzheng 
Authored: Wed May 2 10:44:42 2018 +0800
Committer: huzheng 
Committed: Fri May 4 15:52:43 2018 +0800

--
 .../hbase/replication/ReplicationEndpoint.java  |   2 +-
 .../HBaseInterClusterReplicationEndpoint.java   | 281 +-
 .../TestReplicationAdminWithClusters.java   |   1 -
 .../replication/TestReplicationEndpoint.java|  36 +--
 .../regionserver/TestReplicator.java| 288 +++
 .../TestSerialReplicationEndpoint.java  | 188 
 6 files changed, 384 insertions(+), 412 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/de71cb59/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
index 543dc2f..f4c37b1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
@@ -117,7 +117,7 @@ public interface ReplicationEndpoint extends 
ReplicationPeerConfigListener {
   /**
* Initialize the replication endpoint with the given context.
* @param context replication context
-   * @throws IOException
+   * @throws IOException error occur when initialize the endpoint.
*/
   void init(Context context) throws IOException;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/de71cb59/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
index fd3c671..7db53aa 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
@@ -24,9 +24,9 @@ import java.net.SocketTimeoutException;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CompletionService;
 import java.util.concurrent.ExecutionException;
@@ -37,6 +37,9 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -108,6 +111,7 @@ public class HBaseInterClusterReplicationEndpoint extends 
HBaseReplicationEndpoi
   private boolean replicationBulkLoadDataEnabled;
   private Abortable abortable;
   private boolean dropOnDeletedTables;
+  private boolean isSerial = false;
 
   @Override
   public void init(Context context) throws IOException {
@@ -160,6 +164,7 @@ public class HBaseInterClusterReplicationEndpoint extends 
HBaseReplicationEndpoi
 Path baseNSDir = new Path(HConstants.BASE_NAMESPACE_DIR);
 baseNamespaceDir = new Path(rootDir, baseNSDir);
 hfileArchiveDir = new Path(rootDir, new 
Path(HConstants.HFILE_ARCHIVE_DIRECTORY, baseNSDir));
+isSerial = context.getPeerConfig().isSerial();
   }
 
   private void decorateConf() {
@@ -203,40 +208,60 @@ public class HBaseInterClusterReplicationEndpoint extends 
HBaseReplicationEndpoi
 return sleepMultiplier < maxRetriesMultiplier;
   }
 
-  private List createBatches(final List entries) {
+  private int getEstimatedEntrySize(Entry e) {
+long size = e.getKey().estimatedSerializedSizeOf() + 
e.getEdit().estimatedSerializedSizeOf();
+return (int) size;
+  }
+
+  private List 

hbase git commit: HBASE-20378 Provide a hbck option to cleanup replication barrier for a table

2018-05-04 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2 eaafdff76 -> 5f260451d


HBASE-20378 Provide a hbck option to cleanup replication barrier for a table

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5f260451
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5f260451
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5f260451

Branch: refs/heads/branch-2
Commit: 5f260451d553011a22e8dad28b7e13fcdded6037
Parents: eaafdff
Author: jingyuntian 
Authored: Sat Apr 28 11:34:29 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 15:20:17 2018 +0800

--
 .../org/apache/hadoop/hbase/util/HBaseFsck.java | 131 ++--
 .../TestHBaseFsckCleanReplicationBarriers.java  | 205 +++
 .../hadoop/hbase/util/hbck/HbckTestingUtil.java |  20 +-
 3 files changed, 336 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5f260451/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index f5342b0..1f9c9ab 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -58,6 +58,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.lang3.StringUtils;
@@ -85,6 +86,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ClusterConnection;
@@ -99,7 +101,9 @@ import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RowMutations;
+import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
@@ -115,6 +119,10 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.replication.ReplicationException;
+import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
+import org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
+import org.apache.hadoop.hbase.replication.ReplicationStorageFactory;
+import org.apache.hadoop.hbase.replication.ReplicationUtils;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
@@ -268,11 +276,13 @@ public class HBaseFsck extends Configured implements 
Closeable {
   private boolean fixHFileLinks = false; // fix lingering HFileLinks
   private boolean fixEmptyMetaCells = false; // fix (remove) empty 
REGIONINFO_QUALIFIER rows
   private boolean fixReplication = false; // fix undeleted replication queues 
for removed peer
+  private boolean cleanReplicationBarrier = false; // clean replication 
barriers of a table
   private boolean fixAny = false; // Set to true if any of the fix is required.
 
   // limit checking/fixes to listed tables, if empty attempt to check/fix all
   // hbase:meta are always checked
  private Set<TableName> tablesIncluded = new HashSet<>();
+  private TableName cleanReplicationBarrierTable;
   private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping 
regions to merge
   // maximum number of overlapping regions to sideline
   private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;
@@ -786,6 +796,8 @@ public class HBaseFsck extends Configured implements 
Closeable {
 
 checkAndFixReplication();
 
+cleanReplicationBarrier();
+
 // Remove the hbck znode
 cleanupHbckZnode();
 
@@ -4119,14 +4131,13 @@ public class HBaseFsck extends Configured implements 

hbase git commit: HBASE-20378 Provide a hbck option to cleanup replication barrier for a table

2018-05-04 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 6225b4a49 -> 87f5b5f34


HBASE-20378 Provide a hbck option to cleanup replication barrier for a table

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/87f5b5f3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/87f5b5f3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/87f5b5f3

Branch: refs/heads/master
Commit: 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f
Parents: 6225b4a
Author: jingyuntian 
Authored: Sat Apr 28 11:34:29 2018 +0800
Committer: zhangduo 
Committed: Fri May 4 15:27:33 2018 +0800

--
 .../org/apache/hadoop/hbase/util/HBaseFsck.java | 131 ++--
 .../TestHBaseFsckCleanReplicationBarriers.java  | 205 +++
 .../hadoop/hbase/util/hbck/HbckTestingUtil.java |  20 +-
 3 files changed, 336 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/87f5b5f3/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 9fcf320..6d9ca9a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -58,6 +58,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.lang3.StringUtils;
@@ -85,6 +86,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ClusterConnection;
@@ -99,7 +101,9 @@ import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RowMutations;
+import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
@@ -115,6 +119,10 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.replication.ReplicationException;
+import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
+import org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
+import org.apache.hadoop.hbase.replication.ReplicationStorageFactory;
+import org.apache.hadoop.hbase.replication.ReplicationUtils;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
@@ -268,11 +276,13 @@ public class HBaseFsck extends Configured implements 
Closeable {
   private boolean fixHFileLinks = false; // fix lingering HFileLinks
   private boolean fixEmptyMetaCells = false; // fix (remove) empty 
REGIONINFO_QUALIFIER rows
   private boolean fixReplication = false; // fix undeleted replication queues 
for removed peer
+  private boolean cleanReplicationBarrier = false; // clean replication 
barriers of a table
   private boolean fixAny = false; // Set to true if any of the fix is required.
 
   // limit checking/fixes to listed tables, if empty attempt to check/fix all
   // hbase:meta are always checked
  private Set<TableName> tablesIncluded = new HashSet<>();
+  private TableName cleanReplicationBarrierTable;
   private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping 
regions to merge
   // maximum number of overlapping regions to sideline
   private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;
@@ -786,6 +796,8 @@ public class HBaseFsck extends Configured implements 
Closeable {
 
 checkAndFixReplication();
 
+cleanReplicationBarrier();
+
 // Remove the hbck znode
 cleanupHbckZnode();
 
@@ -4118,14 +4130,13 @@ public class HBaseFsck extends Configured implements 

hbase git commit: HBASE-20481 Replicate entries from same region serially in ReplicationEndpoint for serial replication

2018-05-04 Thread openinx
Repository: hbase
Updated Branches:
  refs/heads/master 9b9f85147 -> 6225b4a49


HBASE-20481 Replicate entries from same region serially in ReplicationEndpoint 
for serial replication


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6225b4a4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6225b4a4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6225b4a4

Branch: refs/heads/master
Commit: 6225b4a492c40a03475b666915b96984b25b3c47
Parents: 9b9f851
Author: huzheng 
Authored: Wed May 2 10:44:42 2018 +0800
Committer: huzheng 
Committed: Fri May 4 15:22:02 2018 +0800

--
 .../hbase/replication/ReplicationEndpoint.java  |   2 +-
 .../HBaseInterClusterReplicationEndpoint.java   | 281 +-
 .../TestReplicationAdminWithClusters.java   |   1 -
 .../replication/TestReplicationEndpoint.java|  36 +--
 .../regionserver/TestReplicator.java| 288 +++
 .../TestSerialReplicationEndpoint.java  | 188 
 6 files changed, 384 insertions(+), 412 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6225b4a4/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
index 543dc2f..f4c37b1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java
@@ -117,7 +117,7 @@ public interface ReplicationEndpoint extends 
ReplicationPeerConfigListener {
   /**
* Initialize the replication endpoint with the given context.
* @param context replication context
-   * @throws IOException
+   * @throws IOException error occur when initialize the endpoint.
*/
   void init(Context context) throws IOException;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/6225b4a4/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
index fd3c671..7db53aa 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
@@ -24,9 +24,9 @@ import java.net.SocketTimeoutException;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CompletionService;
 import java.util.concurrent.ExecutionException;
@@ -37,6 +37,9 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -108,6 +111,7 @@ public class HBaseInterClusterReplicationEndpoint extends 
HBaseReplicationEndpoi
   private boolean replicationBulkLoadDataEnabled;
   private Abortable abortable;
   private boolean dropOnDeletedTables;
+  private boolean isSerial = false;
 
   @Override
   public void init(Context context) throws IOException {
@@ -160,6 +164,7 @@ public class HBaseInterClusterReplicationEndpoint extends 
HBaseReplicationEndpoi
 Path baseNSDir = new Path(HConstants.BASE_NAMESPACE_DIR);
 baseNamespaceDir = new Path(rootDir, baseNSDir);
 hfileArchiveDir = new Path(rootDir, new 
Path(HConstants.HFILE_ARCHIVE_DIRECTORY, baseNSDir));
+isSerial = context.getPeerConfig().isSerial();
   }
 
   private void decorateConf() {
@@ -203,40 +208,60 @@ public class HBaseInterClusterReplicationEndpoint extends 
HBaseReplicationEndpoi
 return sleepMultiplier < maxRetriesMultiplier;
   }
 
-  private List createBatches(final List entries) {
+  private int getEstimatedEntrySize(Entry e) {
+long size = e.getKey().estimatedSerializedSizeOf() + 
e.getEdit().estimatedSerializedSizeOf();
+return (int) size;
+  }
+
+  private List 

hbase git commit: HBASE-20524 Need to clear metrics when ReplicationSourceManager refresh replication sources

2018-05-04 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/branch-2 e92580e20 -> eaafdff76


HBASE-20524 Need to clear metrics when ReplicationSourceManager refresh 
replication sources


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eaafdff7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eaafdff7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eaafdff7

Branch: refs/heads/branch-2
Commit: eaafdff7619d4586fc43ffbc4ecc0bcdd872
Parents: e92580e
Author: Guanghao Zhang 
Authored: Thu May 3 16:45:06 2018 +0800
Committer: Guanghao Zhang 
Committed: Fri May 4 14:34:07 2018 +0800

--
 .../replication/regionserver/RecoveredReplicationSource.java | 1 +
 .../hadoop/hbase/replication/regionserver/ReplicationSource.java | 1 +
 .../hbase/replication/regionserver/ReplicationSourceManager.java | 2 --
 .../apache/hadoop/hbase/replication/ReplicationSourceDummy.java  | 4 ++--
 4 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eaafdff7/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
index f1ad99d..a21ca44 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
@@ -149,6 +149,7 @@ public class RecoveredReplicationSource extends 
ReplicationSource {
   Threads.sleep(100);// wait a short while for other worker thread to 
fully exit
   boolean allTasksDone = workerThreads.values().stream().allMatch(w -> 
w.isFinished());
   if (allTasksDone) {
+this.getSourceMetrics().clear();
 manager.removeRecoveredSource(this);
 LOG.info("Finished recovering queue {} with the following stats: {}", 
queueId, getStats());
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/eaafdff7/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
index 236c575..b05a673 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
@@ -519,6 +519,7 @@ public class ReplicationSource implements 
ReplicationSourceInterface {
 }
   }
 }
+this.metrics.clear();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/eaafdff7/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index 70cd986..9b4a22c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -418,7 +418,6 @@ public class ReplicationSourceManager implements 
ReplicationListener {
*/
   void removeRecoveredSource(ReplicationSourceInterface src) {
 LOG.info("Done with the recovered queue " + src.getQueueId());
-src.getSourceMetrics().clear();
 this.oldsources.remove(src);
 // Delete queue from storage and memory
 deleteQueue(src.getQueueId());
@@ -431,7 +430,6 @@ public class ReplicationSourceManager implements 
ReplicationListener {
*/
   void removeSource(ReplicationSourceInterface src) {
 LOG.info("Done with the queue " + src.getQueueId());
-src.getSourceMetrics().clear();
 this.sources.remove(src.getPeerId());
 // Delete queue from storage and memory
 deleteQueue(src.getQueueId());

http://git-wip-us.apache.org/repos/asf/hbase/blob/eaafdff7/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/ReplicationSourceDummy.java
--
diff --git 

hbase git commit: HBASE-20524 Need to clear metrics when ReplicationSourceManager refresh replication sources

2018-05-04 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/master 09ca61938 -> 9b9f85147


HBASE-20524 Need to clear metrics when ReplicationSourceManager refresh 
replication sources


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9b9f8514
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9b9f8514
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9b9f8514

Branch: refs/heads/master
Commit: 9b9f851470b74ea95766d2828c37e75675392a1b
Parents: 09ca619
Author: Guanghao Zhang 
Authored: Thu May 3 16:45:06 2018 +0800
Committer: Guanghao Zhang 
Committed: Fri May 4 14:31:41 2018 +0800

--
 .../replication/regionserver/RecoveredReplicationSource.java | 1 +
 .../hadoop/hbase/replication/regionserver/ReplicationSource.java | 1 +
 .../hbase/replication/regionserver/ReplicationSourceManager.java | 2 --
 .../apache/hadoop/hbase/replication/ReplicationSourceDummy.java  | 4 ++--
 4 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9b9f8514/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
index f1ad99d..a21ca44 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
@@ -149,6 +149,7 @@ public class RecoveredReplicationSource extends 
ReplicationSource {
   Threads.sleep(100);// wait a short while for other worker thread to 
fully exit
   boolean allTasksDone = workerThreads.values().stream().allMatch(w -> 
w.isFinished());
   if (allTasksDone) {
+this.getSourceMetrics().clear();
 manager.removeRecoveredSource(this);
 LOG.info("Finished recovering queue {} with the following stats: {}", 
queueId, getStats());
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/9b9f8514/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
index 236c575..b05a673 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
@@ -519,6 +519,7 @@ public class ReplicationSource implements 
ReplicationSourceInterface {
 }
   }
 }
+this.metrics.clear();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/9b9f8514/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index 70cd986..9b4a22c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -418,7 +418,6 @@ public class ReplicationSourceManager implements 
ReplicationListener {
*/
   void removeRecoveredSource(ReplicationSourceInterface src) {
 LOG.info("Done with the recovered queue " + src.getQueueId());
-src.getSourceMetrics().clear();
 this.oldsources.remove(src);
 // Delete queue from storage and memory
 deleteQueue(src.getQueueId());
@@ -431,7 +430,6 @@ public class ReplicationSourceManager implements 
ReplicationListener {
*/
   void removeSource(ReplicationSourceInterface src) {
 LOG.info("Done with the queue " + src.getQueueId());
-src.getSourceMetrics().clear();
 this.sources.remove(src.getPeerId());
 // Delete queue from storage and memory
 deleteQueue(src.getQueueId());

http://git-wip-us.apache.org/repos/asf/hbase/blob/9b9f8514/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/ReplicationSourceDummy.java
--
diff --git