hbase git commit: HBASE-20941 Created and implemented HbckService in master

2018-09-12 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 2479282fb -> 589c1e407


HBASE-20941 Created and implemented HbckService in master

Added API setTableStateInMeta() to update table state only in Meta. This will be used by the hbck2 tool.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/589c1e40
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/589c1e40
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/589c1e40

Branch: refs/heads/branch-2.1
Commit: 589c1e40785aab5ea7b0a18e5261970b408a09d5
Parents: 2479282
Author: Umesh Agashe 
Authored: Tue Aug 7 11:46:42 2018 -0700
Committer: Michael Stack 
Committed: Wed Sep 12 21:31:13 2018 -0700

--
 .../hadoop/hbase/client/ClusterConnection.java  |  32 ++
 .../hbase/client/ConnectionImplementation.java  |  22 
 .../apache/hadoop/hbase/client/HBaseHbck.java   |  95 +
 .../org/apache/hadoop/hbase/client/Hbck.java|  50 +
 .../hbase/shaded/protobuf/RequestConverter.java |  12 +++
 .../hadoop/hbase/HBaseInterfaceAudience.java|   5 +
 .../src/main/protobuf/Master.proto  |  11 ++
 .../hadoop/hbase/master/MasterRpcServices.java  |  31 +-
 .../hadoop/hbase/HBaseTestingUtility.java   |   9 +-
 .../apache/hadoop/hbase/client/TestHbck.java| 104 +++
 .../hadoop/hbase/master/TestMasterMetrics.java  |   1 +
 11 files changed, 370 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/589c1e40/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
index adf47ca..d3e675c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
@@ -326,4 +326,36 @@ public interface ClusterConnection extends Connection {
    * @throws IOException if a remote or network exception occurs
    */
   int getCurrentNrHRS() throws IOException;
+
+  /**
+   * Retrieve an Hbck implementation to fix an HBase cluster.
+   * The returned Hbck is not guaranteed to be thread-safe. A new instance should be created by
+   * each thread. This is a lightweight operation. Pooling or caching of the returned Hbck
+   * instance is not recommended.
+   *
+   * The caller is responsible for calling {@link Hbck#close()} on the returned Hbck instance.
+   *
+   * This will be used mostly by the hbck tool.
+   *
+   * @return an Hbck instance for the active master. The active master is fetched from ZooKeeper.
+   */
+  Hbck getHbck() throws IOException;
+
+  /**
+   * Retrieve an Hbck implementation to fix an HBase cluster.
+   * The returned Hbck is not guaranteed to be thread-safe. A new instance should be created by
+   * each thread. This is a lightweight operation. Pooling or caching of the returned Hbck
+   * instance is not recommended.
+   *
+   * The caller is responsible for calling {@link Hbck#close()} on the returned Hbck instance.
+   *
+   * This will be used mostly by the hbck tool, and can also be used to bypass fetching the
+   * registered master from ZooKeeper: in situations where ZooKeeper is not available, or the
+   * active master is not registered with ZooKeeper but its address is known by other means, the
+   * master can be specified explicitly.
+   *
+   * @param masterServer explicit {@link ServerName} for the master server
+   * @return an Hbck instance for the specified master server
+   */
+  Hbck getHbck(ServerName masterServer) throws IOException;
 }
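
For orientation, a rough sketch of how a caller might exercise the new API end to end. The getHbck() and Hbck#close() calls come from the diff above; the setTableStateInMeta(TableState) call is an assumption based on the commit description ("update table state only in Meta") and is not shown in this diff.

// Sketch only, not part of the commit. ClusterConnection is an internal
// interface, so the cast below is for illustration; hbck2 itself obtains
// the connection through internal plumbing.
import java.io.IOException;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Hbck;
import org.apache.hadoop.hbase.client.TableState;

public class HbckSketch {
  public static void main(String[] args) throws IOException {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create())) {
      // getHbck() is declared on ClusterConnection, not the public Connection interface.
      try (Hbck hbck = ((ClusterConnection) conn).getHbck()) { // active master fetched from ZooKeeper
        // Hypothetical repair: force the table state recorded in hbase:meta to ENABLED.
        hbck.setTableStateInMeta(
            new TableState(TableName.valueOf("t1"), TableState.State.ENABLED));
      } // per the javadoc above, the caller must close() the returned Hbck
    }
  }
}

The getHbck(ServerName) overload fits the same pattern when ZooKeeper is down and the master address is known by other means.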

http://git-wip-us.apache.org/repos/asf/hbase/blob/589c1e40/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index 1176cbd..21d796c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -408,6 +408,28 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
   }
 
   @Override
+  public Hbck getHbck() throws IOException {
+    return getHbck(get(registry.getMasterAddress()));
+  }
+
+  @Override
+  public Hbck getHbck(ServerName masterServer) throws IOException {
+    checkClosed();
+    if (isDeadServer(masterServer)) {
+      throw new RegionServerStoppedException(masterServer + " is dead.");

[2/8] hbase git commit: HBASE-21189 flaky job should gather machine stats

2018-09-12 Thread busbey
HBASE-21189 flaky job should gather machine stats

Signed-off-by: Michael Stack 
(cherry picked from commit 5d14c1af65c02f4e87059337c35e4431505de91c)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/715b95ca
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/715b95ca
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/715b95ca

Branch: refs/heads/branch-2
Commit: 715b95ca5bff7d9d43e430044456d0cf52067c51
Parents: 9d13196
Author: Sean Busbey 
Authored: Wed Sep 12 09:20:41 2018 -0500
Committer: Sean Busbey 
Committed: Wed Sep 12 23:09:08 2018 -0500

--
 dev-support/flaky-tests/run-flaky-tests.Jenkinsfile | 16 
 1 file changed, 12 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/715b95ca/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
--
diff --git a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
index cbb75c1..341d45c 100644
--- a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
+++ b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
@@ -49,11 +49,19 @@ pipeline {
             mvn_args=("${mvn_args[@]}" -X)
             set -x
           fi
-          ulimit -a
-          rm -rf local-repository/org/apache/hbase
           curl "${curl_args[@]}" -o includes.txt "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/includes"
           if [ -s includes.txt ]; then
-            mvn clean package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
+            rm -rf local-repository/org/apache/hbase
+            mvn clean "${mvn_args[@]}"
+            rm -rf "target/machine" && mkdir -p "target/machine"
+            if [ -x dev-support/gather_machine_environment.sh ]; then
+              "./dev-support/gather_machine_environment.sh" "target/machine"
+              echo "got the following saved stats in 'target/machine'"
+              ls -lh "target/machine"
+            else
+              echo "Skipped gathering machine environment because we couldn't read the script to do so."
+            fi
+            mvn package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
           else
             echo "set of flaky tests is currently empty."
           fi
@@ -65,7 +73,7 @@ pipeline {
     always {
       junit testResults: "**/surefire-reports/*.xml", allowEmptyResults: true
       // TODO compress these logs
-      archive 'includes.txt,**/surefire-reports/*,**/test-data/*'
+      archive 'includes.txt,**/surefire-reports/*,**/test-data/*,target/machine/*'
     }
   }
 }



[7/8] hbase git commit: HBASE-21189 flaky job should gather machine stats

2018-09-12 Thread busbey
HBASE-21189 flaky job should gather machine stats

Signed-off-by: Michael Stack 
(cherry picked from commit 5d14c1af65c02f4e87059337c35e4431505de91c)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2dec0202
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2dec0202
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2dec0202

Branch: refs/heads/branch-1.3
Commit: 2dec02022429703f7c89e9a0e89542c1fbe7803d
Parents: 31c7863
Author: Sean Busbey 
Authored: Wed Sep 12 09:20:41 2018 -0500
Committer: Sean Busbey 
Committed: Wed Sep 12 23:09:32 2018 -0500

--
 dev-support/flaky-tests/run-flaky-tests.Jenkinsfile | 16 
 1 file changed, 12 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2dec0202/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
--
diff --git a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
index cbb75c1..341d45c 100644
--- a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
+++ b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
@@ -49,11 +49,19 @@ pipeline {
             mvn_args=("${mvn_args[@]}" -X)
             set -x
           fi
-          ulimit -a
-          rm -rf local-repository/org/apache/hbase
           curl "${curl_args[@]}" -o includes.txt "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/includes"
           if [ -s includes.txt ]; then
-            mvn clean package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
+            rm -rf local-repository/org/apache/hbase
+            mvn clean "${mvn_args[@]}"
+            rm -rf "target/machine" && mkdir -p "target/machine"
+            if [ -x dev-support/gather_machine_environment.sh ]; then
+              "./dev-support/gather_machine_environment.sh" "target/machine"
+              echo "got the following saved stats in 'target/machine'"
+              ls -lh "target/machine"
+            else
+              echo "Skipped gathering machine environment because we couldn't read the script to do so."
+            fi
+            mvn package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
           else
             echo "set of flaky tests is currently empty."
           fi
@@ -65,7 +73,7 @@ pipeline {
     always {
       junit testResults: "**/surefire-reports/*.xml", allowEmptyResults: true
       // TODO compress these logs
-      archive 'includes.txt,**/surefire-reports/*,**/test-data/*'
+      archive 'includes.txt,**/surefire-reports/*,**/test-data/*,target/machine/*'
     }
   }
 }



[4/8] hbase git commit: HBASE-21189 flaky job should gather machine stats

2018-09-12 Thread busbey
HBASE-21189 flaky job should gather machine stats

Signed-off-by: Michael Stack 
(cherry picked from commit 5d14c1af65c02f4e87059337c35e4431505de91c)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d6ab9150
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d6ab9150
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d6ab9150

Branch: refs/heads/branch-2.0
Commit: d6ab91506e20587e19eefca1b7160c3f4472fc3f
Parents: 739d725
Author: Sean Busbey 
Authored: Wed Sep 12 09:20:41 2018 -0500
Committer: Sean Busbey 
Committed: Wed Sep 12 23:09:17 2018 -0500

--
 dev-support/flaky-tests/run-flaky-tests.Jenkinsfile | 16 
 1 file changed, 12 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d6ab9150/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
--
diff --git a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
index cbb75c1..341d45c 100644
--- a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
+++ b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
@@ -49,11 +49,19 @@ pipeline {
             mvn_args=("${mvn_args[@]}" -X)
             set -x
           fi
-          ulimit -a
-          rm -rf local-repository/org/apache/hbase
           curl "${curl_args[@]}" -o includes.txt "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/includes"
           if [ -s includes.txt ]; then
-            mvn clean package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
+            rm -rf local-repository/org/apache/hbase
+            mvn clean "${mvn_args[@]}"
+            rm -rf "target/machine" && mkdir -p "target/machine"
+            if [ -x dev-support/gather_machine_environment.sh ]; then
+              "./dev-support/gather_machine_environment.sh" "target/machine"
+              echo "got the following saved stats in 'target/machine'"
+              ls -lh "target/machine"
+            else
+              echo "Skipped gathering machine environment because we couldn't read the script to do so."
+            fi
+            mvn package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
           else
             echo "set of flaky tests is currently empty."
           fi
@@ -65,7 +73,7 @@ pipeline {
     always {
       junit testResults: "**/surefire-reports/*.xml", allowEmptyResults: true
       // TODO compress these logs
-      archive 'includes.txt,**/surefire-reports/*,**/test-data/*'
+      archive 'includes.txt,**/surefire-reports/*,**/test-data/*,target/machine/*'
     }
   }
 }



[8/8] hbase git commit: HBASE-21189 flaky job should gather machine stats

2018-09-12 Thread busbey
HBASE-21189 flaky job should gather machine stats

Signed-off-by: Michael Stack 
(cherry picked from commit 5d14c1af65c02f4e87059337c35e4431505de91c)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6679643d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6679643d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6679643d

Branch: refs/heads/branch-1.2
Commit: 6679643d6ea5ab1765693dd8fa38cc10f082a63a
Parents: 2c8060e
Author: Sean Busbey 
Authored: Wed Sep 12 09:20:41 2018 -0500
Committer: Sean Busbey 
Committed: Wed Sep 12 23:09:37 2018 -0500

--
 dev-support/flaky-tests/run-flaky-tests.Jenkinsfile | 16 
 1 file changed, 12 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6679643d/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
--
diff --git a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
index cbb75c1..341d45c 100644
--- a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
+++ b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
@@ -49,11 +49,19 @@ pipeline {
             mvn_args=("${mvn_args[@]}" -X)
             set -x
           fi
-          ulimit -a
-          rm -rf local-repository/org/apache/hbase
           curl "${curl_args[@]}" -o includes.txt "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/includes"
           if [ -s includes.txt ]; then
-            mvn clean package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
+            rm -rf local-repository/org/apache/hbase
+            mvn clean "${mvn_args[@]}"
+            rm -rf "target/machine" && mkdir -p "target/machine"
+            if [ -x dev-support/gather_machine_environment.sh ]; then
+              "./dev-support/gather_machine_environment.sh" "target/machine"
+              echo "got the following saved stats in 'target/machine'"
+              ls -lh "target/machine"
+            else
+              echo "Skipped gathering machine environment because we couldn't read the script to do so."
+            fi
+            mvn package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
           else
             echo "set of flaky tests is currently empty."
           fi
@@ -65,7 +73,7 @@ pipeline {
     always {
       junit testResults: "**/surefire-reports/*.xml", allowEmptyResults: true
       // TODO compress these logs
-      archive 'includes.txt,**/surefire-reports/*,**/test-data/*'
+      archive 'includes.txt,**/surefire-reports/*,**/test-data/*,target/machine/*'
     }
   }
 }



[1/8] hbase git commit: HBASE-21189 flaky job should gather machine stats

2018-09-12 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1 02089f20b -> 7f5de94ca
  refs/heads/branch-1.2 2c8060e1e -> 6679643d6
  refs/heads/branch-1.3 31c786338 -> 2dec02022
  refs/heads/branch-1.4 1d2f6f80d -> cdb79a543
  refs/heads/branch-2 9d1319648 -> 715b95ca5
  refs/heads/branch-2.0 739d7256e -> d6ab91506
  refs/heads/branch-2.1 487f713c6 -> 2479282fb
  refs/heads/master dc1dedb07 -> 5d14c1af6


HBASE-21189 flaky job should gather machine stats

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5d14c1af
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5d14c1af
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5d14c1af

Branch: refs/heads/master
Commit: 5d14c1af65c02f4e87059337c35e4431505de91c
Parents: dc1dedb
Author: Sean Busbey 
Authored: Wed Sep 12 09:20:41 2018 -0500
Committer: Sean Busbey 
Committed: Wed Sep 12 23:08:31 2018 -0500

--
 dev-support/flaky-tests/run-flaky-tests.Jenkinsfile | 16 
 1 file changed, 12 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5d14c1af/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
--
diff --git a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
index cbb75c1..341d45c 100644
--- a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
+++ b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
@@ -49,11 +49,19 @@ pipeline {
             mvn_args=("${mvn_args[@]}" -X)
             set -x
           fi
-          ulimit -a
-          rm -rf local-repository/org/apache/hbase
           curl "${curl_args[@]}" -o includes.txt "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/includes"
           if [ -s includes.txt ]; then
-            mvn clean package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
+            rm -rf local-repository/org/apache/hbase
+            mvn clean "${mvn_args[@]}"
+            rm -rf "target/machine" && mkdir -p "target/machine"
+            if [ -x dev-support/gather_machine_environment.sh ]; then
+              "./dev-support/gather_machine_environment.sh" "target/machine"
+              echo "got the following saved stats in 'target/machine'"
+              ls -lh "target/machine"
+            else
+              echo "Skipped gathering machine environment because we couldn't read the script to do so."
+            fi
+            mvn package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
           else
             echo "set of flaky tests is currently empty."
           fi
@@ -65,7 +73,7 @@ pipeline {
     always {
      junit testResults: "**/surefire-reports/*.xml", allowEmptyResults: true
       // TODO compress these logs
-      archive 'includes.txt,**/surefire-reports/*,**/test-data/*'
+      archive 'includes.txt,**/surefire-reports/*,**/test-data/*,target/machine/*'
     }
   }
 }



[3/8] hbase git commit: HBASE-21189 flaky job should gather machine stats

2018-09-12 Thread busbey
HBASE-21189 flaky job should gather machine stats

Signed-off-by: Michael Stack 
(cherry picked from commit 5d14c1af65c02f4e87059337c35e4431505de91c)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2479282f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2479282f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2479282f

Branch: refs/heads/branch-2.1
Commit: 2479282fb271c6f7ebf5cde4b22f41cafec57e91
Parents: 487f713
Author: Sean Busbey 
Authored: Wed Sep 12 09:20:41 2018 -0500
Committer: Sean Busbey 
Committed: Wed Sep 12 23:09:13 2018 -0500

--
 dev-support/flaky-tests/run-flaky-tests.Jenkinsfile | 16 
 1 file changed, 12 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2479282f/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
--
diff --git a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
index cbb75c1..341d45c 100644
--- a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
+++ b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
@@ -49,11 +49,19 @@ pipeline {
             mvn_args=("${mvn_args[@]}" -X)
             set -x
           fi
-          ulimit -a
-          rm -rf local-repository/org/apache/hbase
           curl "${curl_args[@]}" -o includes.txt "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/includes"
           if [ -s includes.txt ]; then
-            mvn clean package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
+            rm -rf local-repository/org/apache/hbase
+            mvn clean "${mvn_args[@]}"
+            rm -rf "target/machine" && mkdir -p "target/machine"
+            if [ -x dev-support/gather_machine_environment.sh ]; then
+              "./dev-support/gather_machine_environment.sh" "target/machine"
+              echo "got the following saved stats in 'target/machine'"
+              ls -lh "target/machine"
+            else
+              echo "Skipped gathering machine environment because we couldn't read the script to do so."
+            fi
+            mvn package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
           else
             echo "set of flaky tests is currently empty."
           fi
@@ -65,7 +73,7 @@ pipeline {
     always {
       junit testResults: "**/surefire-reports/*.xml", allowEmptyResults: true
       // TODO compress these logs
-      archive 'includes.txt,**/surefire-reports/*,**/test-data/*'
+      archive 'includes.txt,**/surefire-reports/*,**/test-data/*,target/machine/*'
     }
   }
 }



[6/8] hbase git commit: HBASE-21189 flaky job should gather machine stats

2018-09-12 Thread busbey
HBASE-21189 flaky job should gather machine stats

Signed-off-by: Michael Stack 
(cherry picked from commit 5d14c1af65c02f4e87059337c35e4431505de91c)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cdb79a54
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cdb79a54
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cdb79a54

Branch: refs/heads/branch-1.4
Commit: cdb79a5430d604778a1a7cbe800c7e579e1190f9
Parents: 1d2f6f8
Author: Sean Busbey 
Authored: Wed Sep 12 09:20:41 2018 -0500
Committer: Sean Busbey 
Committed: Wed Sep 12 23:09:27 2018 -0500

--
 dev-support/flaky-tests/run-flaky-tests.Jenkinsfile | 16 
 1 file changed, 12 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cdb79a54/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
--
diff --git a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
index cbb75c1..341d45c 100644
--- a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
+++ b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
@@ -49,11 +49,19 @@ pipeline {
             mvn_args=("${mvn_args[@]}" -X)
             set -x
           fi
-          ulimit -a
-          rm -rf local-repository/org/apache/hbase
           curl "${curl_args[@]}" -o includes.txt "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/includes"
           if [ -s includes.txt ]; then
-            mvn clean package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
+            rm -rf local-repository/org/apache/hbase
+            mvn clean "${mvn_args[@]}"
+            rm -rf "target/machine" && mkdir -p "target/machine"
+            if [ -x dev-support/gather_machine_environment.sh ]; then
+              "./dev-support/gather_machine_environment.sh" "target/machine"
+              echo "got the following saved stats in 'target/machine'"
+              ls -lh "target/machine"
+            else
+              echo "Skipped gathering machine environment because we couldn't read the script to do so."
+            fi
+            mvn package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
           else
             echo "set of flaky tests is currently empty."
           fi
@@ -65,7 +73,7 @@ pipeline {
     always {
       junit testResults: "**/surefire-reports/*.xml", allowEmptyResults: true
       // TODO compress these logs
-      archive 'includes.txt,**/surefire-reports/*,**/test-data/*'
+      archive 'includes.txt,**/surefire-reports/*,**/test-data/*,target/machine/*'
     }
   }
 }



[5/8] hbase git commit: HBASE-21189 flaky job should gather machine stats

2018-09-12 Thread busbey
HBASE-21189 flaky job should gather machine stats

Signed-off-by: Michael Stack 
(cherry picked from commit 5d14c1af65c02f4e87059337c35e4431505de91c)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7f5de94c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7f5de94c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7f5de94c

Branch: refs/heads/branch-1
Commit: 7f5de94ca9d486d9f4157a1cadce26b74319361e
Parents: 02089f2
Author: Sean Busbey 
Authored: Wed Sep 12 09:20:41 2018 -0500
Committer: Sean Busbey 
Committed: Wed Sep 12 23:09:23 2018 -0500

--
 dev-support/flaky-tests/run-flaky-tests.Jenkinsfile | 16 
 1 file changed, 12 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7f5de94c/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
--
diff --git a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
index cbb75c1..341d45c 100644
--- a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
+++ b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
@@ -49,11 +49,19 @@ pipeline {
             mvn_args=("${mvn_args[@]}" -X)
             set -x
           fi
-          ulimit -a
-          rm -rf local-repository/org/apache/hbase
           curl "${curl_args[@]}" -o includes.txt "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/includes"
           if [ -s includes.txt ]; then
-            mvn clean package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
+            rm -rf local-repository/org/apache/hbase
+            mvn clean "${mvn_args[@]}"
+            rm -rf "target/machine" && mkdir -p "target/machine"
+            if [ -x dev-support/gather_machine_environment.sh ]; then
+              "./dev-support/gather_machine_environment.sh" "target/machine"
+              echo "got the following saved stats in 'target/machine'"
+              ls -lh "target/machine"
+            else
+              echo "Skipped gathering machine environment because we couldn't read the script to do so."
+            fi
+            mvn package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
           else
             echo "set of flaky tests is currently empty."
           fi
@@ -65,7 +73,7 @@ pipeline {
     always {
       junit testResults: "**/surefire-reports/*.xml", allowEmptyResults: true
       // TODO compress these logs
-      archive 'includes.txt,**/surefire-reports/*,**/test-data/*'
+      archive 'includes.txt,**/surefire-reports/*,**/test-data/*,target/machine/*'
     }
   }
 }



hbase git commit: HBASE-21188 Print heap and gc information in our junit ResourceChecker

2018-09-12 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 76199a0a2 -> dc1dedb07


HBASE-21188 Print heap and gc information in our junit ResourceChecker


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dc1dedb0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dc1dedb0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dc1dedb0

Branch: refs/heads/master
Commit: dc1dedb073f390a5cb98bcd1b57358900d69867c
Parents: 76199a0
Author: zhangduo 
Authored: Wed Sep 12 22:16:27 2018 +0800
Committer: zhangduo 
Committed: Thu Sep 13 09:58:34 2018 +0800

--
 .../hbase/ResourceCheckerJUnitListener.java | 42 
 1 file changed, 42 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/dc1dedb0/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java
--
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java
index 225d94f..d8df137 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java
@@ -18,12 +18,15 @@
 
 package org.apache.hadoop.hbase;
 
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryUsage;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.hbase.ResourceChecker.Phase;
 import org.apache.hadoop.hbase.util.JVM;
 import org.junit.runner.notification.RunListener;
@@ -139,6 +142,41 @@ public class ResourceCheckerJUnitListener extends RunListener {
     }
   }
 
+  static class MaxHeapMemoryMBResourceAnalyzer extends ResourceChecker.ResourceAnalyzer {
+
+    @Override
+    public int getVal(Phase phase) {
+      MemoryUsage usage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
+      return (int) (usage.getMax() / (1024 * 1024));
+    }
+  }
+
+  static class UsedHeapMemoryMBResourceAnalyzer extends ResourceChecker.ResourceAnalyzer {
+
+    @Override
+    public int getVal(Phase phase) {
+      MemoryUsage usage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
+      return (int) (usage.getUsed() / (1024 * 1024));
+    }
+  }
+
+  static class GCCountResourceAnalyzer extends ResourceChecker.ResourceAnalyzer {
+
+    @Override
+    public int getVal(Phase phase) {
+      return Math.toIntExact(ManagementFactory.getGarbageCollectorMXBeans().stream()
+        .mapToLong(b -> b.getCollectionCount()).sum());
+    }
+  }
+
+  static class GCTimeSecondResourceAnalyzer extends ResourceChecker.ResourceAnalyzer {
+
+    @Override
+    public int getVal(Phase phase) {
+      return Math.toIntExact(TimeUnit.MILLISECONDS.toSeconds(ManagementFactory
+        .getGarbageCollectorMXBeans().stream().mapToLong(b -> b.getCollectionTime()).sum()));
+    }
+  }
 
   /**
    * To be implemented by sub classes if they want to add specific ResourceAnalyzer.
@@ -155,6 +193,10 @@ public class ResourceCheckerJUnitListener extends RunListener {
     rc.addResourceAnalyzer(new SystemLoadAverageResourceAnalyzer());
     rc.addResourceAnalyzer(new ProcessCountResourceAnalyzer());
     rc.addResourceAnalyzer(new AvailableMemoryMBResourceAnalyzer());
+    rc.addResourceAnalyzer(new MaxHeapMemoryMBResourceAnalyzer());
+    rc.addResourceAnalyzer(new UsedHeapMemoryMBResourceAnalyzer());
+    rc.addResourceAnalyzer(new GCCountResourceAnalyzer());
+    rc.addResourceAnalyzer(new GCTimeSecondResourceAnalyzer());
 
    addResourceAnalyzer(rc);
 

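The four analyzers added above read only standard JMX beans, so their numbers can be reproduced outside the listener. A minimal, self-contained sketch (not part of the commit) that prints the same four values:

import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;

public class JvmStatsSketch {
  public static void main(String[] args) {
    MemoryUsage heap = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
    System.out.println("MaxHeapMemoryMB: " + heap.getMax() / (1024 * 1024));
    System.out.println("UsedHeapMemoryMB: " + heap.getUsed() / (1024 * 1024));
    long gcCount = 0;
    long gcTimeMs = 0;
    for (GarbageCollectorMXBean gc : ManagementFactory.getGarbageCollectorMXBeans()) {
      gcCount += gc.getCollectionCount(); // may be -1 if a collector does not report counts
      gcTimeMs += gc.getCollectionTime();
    }
    System.out.println("GCCount: " + gcCount);
    System.out.println("GCTimeSecond: " + gcTimeMs / 1000);
  }
}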


[2/3] hbase git commit: HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store

2018-09-12 Thread apurtell
HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store

Conflicts:

hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1d2f6f80
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1d2f6f80
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1d2f6f80

Branch: refs/heads/branch-1.4
Commit: 1d2f6f80d67dc060549b4a8c4146f4c580190e29
Parents: 403a883
Author: Michael Stack 
Authored: Wed Sep 12 10:13:43 2018 -0700
Committer: Andrew Purtell 
Committed: Wed Sep 12 13:36:45 2018 -0700

--
 .../hbase/procedure2/store/wal/ProcedureWALFormatReader.java| 5 -
 1 file changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1d2f6f80/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
--
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index 281292d..b8dbc25 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -113,6 +113,7 @@ public class ProcedureWALFormatReader {
   }
 
   public void read(ProcedureWALFile log, ProcedureWALFormat.Loader loader) throws IOException {
+    long count = 0;
     FSDataInputStream stream = log.getStream();
     try {
       boolean hasMore = true;
@@ -123,6 +124,7 @@ public class ProcedureWALFormatReader {
           hasMore = false;
           break;
         }
+        count++;
         switch (entry.getType()) {
           case PROCEDURE_WAL_INIT:
             readInitEntry(entry);
@@ -144,8 +146,9 @@ public class ProcedureWALFormatReader {
             throw new CorruptedWALProcedureStoreException("Invalid entry: " + entry);
         }
       }
+      LOG.info("Read " + count + " entries in " + log);
     } catch (InvalidProtocolBufferException e) {
-      LOG.error("got an exception while reading the procedure WAL: " + log, e);
+      LOG.error("While reading entry #" + count + " in " + log, e);
       loader.markCorruptedWAL(log, e);
     }
 



[1/3] hbase git commit: HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store

2018-09-12 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 0a2ebdce1 -> 02089f20b
  refs/heads/branch-1.3 bfe373d18 -> 31c786338
  refs/heads/branch-1.4 403a88385 -> 1d2f6f80d


HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store

Conflicts:

hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/02089f20
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/02089f20
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/02089f20

Branch: refs/heads/branch-1
Commit: 02089f20b49acfb3ab2f3acb4248855eaf9bfe8d
Parents: 0a2ebdc
Author: Michael Stack 
Authored: Wed Sep 12 10:13:43 2018 -0700
Committer: Andrew Purtell 
Committed: Wed Sep 12 13:34:01 2018 -0700

--
 .../hbase/procedure2/store/wal/ProcedureWALFormatReader.java| 5 -
 1 file changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/02089f20/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
--
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index 281292d..b8dbc25 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -113,6 +113,7 @@ public class ProcedureWALFormatReader {
   }
 
   public void read(ProcedureWALFile log, ProcedureWALFormat.Loader loader) throws IOException {
+    long count = 0;
     FSDataInputStream stream = log.getStream();
     try {
       boolean hasMore = true;
@@ -123,6 +124,7 @@ public class ProcedureWALFormatReader {
           hasMore = false;
           break;
         }
+        count++;
         switch (entry.getType()) {
           case PROCEDURE_WAL_INIT:
             readInitEntry(entry);
@@ -144,8 +146,9 @@ public class ProcedureWALFormatReader {
             throw new CorruptedWALProcedureStoreException("Invalid entry: " + entry);
         }
       }
+      LOG.info("Read " + count + " entries in " + log);
     } catch (InvalidProtocolBufferException e) {
-      LOG.error("got an exception while reading the procedure WAL: " + log, e);
+      LOG.error("While reading entry #" + count + " in " + log, e);
       loader.markCorruptedWAL(log, e);
     }
 



[3/3] hbase git commit: HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store

2018-09-12 Thread apurtell
HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store

Conflicts:

hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/31c78633
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/31c78633
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/31c78633

Branch: refs/heads/branch-1.3
Commit: 31c7863383c32add6eab965e1d3e0d0c22db3576
Parents: bfe373d
Author: Michael Stack 
Authored: Wed Sep 12 10:13:43 2018 -0700
Committer: Andrew Purtell 
Committed: Wed Sep 12 13:36:50 2018 -0700

--
 .../hbase/procedure2/store/wal/ProcedureWALFormatReader.java| 5 -
 1 file changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/31c78633/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
--
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index 312eedb..587f6b4 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -113,6 +113,7 @@ public class ProcedureWALFormatReader {
   }
 
   public void read(ProcedureWALFile log, ProcedureWALFormat.Loader loader) throws IOException {
+    long count = 0;
     FSDataInputStream stream = log.getStream();
     try {
       boolean hasMore = true;
@@ -123,6 +124,7 @@ public class ProcedureWALFormatReader {
           hasMore = false;
           break;
         }
+        count++;
         switch (entry.getType()) {
           case PROCEDURE_WAL_INIT:
             readInitEntry(entry);
@@ -144,8 +146,9 @@ public class ProcedureWALFormatReader {
             throw new CorruptedWALProcedureStoreException("Invalid entry: " + entry);
         }
       }
+      LOG.info("Read " + count + " entries in " + log);
     } catch (InvalidProtocolBufferException e) {
-      LOG.error("got an exception while reading the procedure WAL: " + log, e);
+      LOG.error("While reading entry #" + count + " in " + log, e);
       loader.markCorruptedWAL(log, e);
     }
 



[1/2] hbase git commit: HBASE-21168 BloomFilterUtil uses hardcoded randomness (Mike Drob) [Forced Update!]

2018-09-12 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 93bffa40d -> 2c8060e1e (forced update)
  refs/heads/branch-1.3 82f72b5be -> bfe373d18 (forced update)


HBASE-21168 BloomFilterUtil uses hardcoded randomness (Mike Drob)

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2c8060e1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2c8060e1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2c8060e1

Branch: refs/heads/branch-1.2
Commit: 2c8060e1e8be34b92f17645c80b8985354485281
Parents: d8790be
Author: Mingliang Liu 
Authored: Wed Sep 12 12:31:09 2018 -0700
Committer: Andrew Purtell 
Committed: Wed Sep 12 13:29:38 2018 -0700

--
 .../hadoop/hbase/util/ByteBloomFilter.java  | 54 +++-
 .../regionserver/TestCompoundBloomFilter.java   |  6 ++-
 2 files changed, 34 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2c8060e1/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
index b8ec4c3..723571d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
@@ -26,6 +26,7 @@ import java.nio.ByteBuffer;
 import java.text.NumberFormat;
 import java.util.Random;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
@@ -423,26 +424,26 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
       int hashCount) {
 
     int hash1 = hash.hash(buf, offset, length, 0);
-    int hash2 = hash.hash(buf, offset, length, hash1);
     int bloomBitSize = bloomSize << 3;
-
+
+    int hash2 = 0;
+    int compositeHash = 0;
+
     if (randomGeneratorForTest == null) {
-      // Production mode.
-      int compositeHash = hash1;
-      for (int i = 0; i < hashCount; i++) {
-        int hashLoc = Math.abs(compositeHash % bloomBitSize);
-        compositeHash += hash2;
-        if (!get(hashLoc, bloomBuf, bloomOffset)) {
-          return false;
-        }
-      }
-    } else {
-      // Test mode with "fake lookups" to estimate "ideal false positive rate".
-      for (int i = 0; i < hashCount; i++) {
-        int hashLoc = randomGeneratorForTest.nextInt(bloomBitSize);
-        if (!get(hashLoc, bloomBuf, bloomOffset)){
-          return false;
-        }
+      // Production mode
+      compositeHash = hash1;
+      hash2 = hash.hash(buf, offset, length, hash1);
+    }
+
+    for (int i = 0; i < hashCount; i++) {
+      int hashLoc = (randomGeneratorForTest == null
+          // Production mode
+          ? Math.abs(compositeHash % bloomBitSize)
+          // Test mode with "fake look-ups" to estimate "ideal false positive rate"
+          : randomGeneratorForTest.nextInt(bloomBitSize));
+      compositeHash += hash2;
+      if (!get(hashLoc, bloomBuf, bloomOffset)) {
+        return false;
       }
     }
     return true;
@@ -598,12 +599,17 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
     return bloom != null;
   }
 
-  public static void setFakeLookupMode(boolean enabled) {
-    if (enabled) {
-      randomGeneratorForTest = new Random(283742987L);
-    } else {
-      randomGeneratorForTest = null;
-    }
+  /**
+   * Sets a random generator to be used for look-ups instead of computing hashes. Can be used to
+   * simulate uniformity of accesses better in a test environment. Should not be set in a real
+   * environment where correctness matters!
+   *
+   * This gets used in {@link #contains(byte[], int, int, ByteBuffer, int, int, Hash, int)}
+   * @param random The random number source to use, or null to compute actual hashes
+   */
+  @VisibleForTesting
+  public static void setRandomGeneratorForTest(Random random) {
+    randomGeneratorForTest = random;
  }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/2c8060e1/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index 0129fad..a0bf6c5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+
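
For tests migrating off the removed method, a short before/after sketch. The seed 283742987L is the one setFakeLookupMode(true) used to hard-code (see the removed lines above); the surrounding scaffolding is hypothetical:

import java.util.Random;

import org.apache.hadoop.hbase.util.ByteBloomFilter;

public class FakeLookupSketch {
  public static void main(String[] args) {
    // Old API: ByteBloomFilter.setFakeLookupMode(true);
    // New API: the test supplies (and owns) the Random and its seed.
    ByteBloomFilter.setRandomGeneratorForTest(new Random(283742987L));
    try {
      // ... exercise bloom look-ups; contains() now draws bit positions
      // from the injected Random instead of computing real hashes ...
    } finally {
      // Old API: setFakeLookupMode(false); passing null restores real hashing.
      ByteBloomFilter.setRandomGeneratorForTest(null);
    }
  }
}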

[2/2] hbase git commit: HBASE-21168 BloomFilterUtil uses hardcoded randomness (Mike Drob)

2018-09-12 Thread apurtell
HBASE-21168 BloomFilterUtil uses hardcoded randomness (Mike Drob)

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bfe373d1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bfe373d1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bfe373d1

Branch: refs/heads/branch-1.3
Commit: bfe373d1882c9b89aaa0fa092846d6a77d7b0602
Parents: b7dfb74
Author: Mingliang Liu 
Authored: Wed Sep 12 12:31:09 2018 -0700
Committer: Andrew Purtell 
Committed: Wed Sep 12 13:29:44 2018 -0700

--
 .../hadoop/hbase/util/ByteBloomFilter.java  | 54 +++-
 .../regionserver/TestCompoundBloomFilter.java   |  6 ++-
 2 files changed, 34 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bfe373d1/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
index b8ec4c3..723571d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
@@ -26,6 +26,7 @@ import java.nio.ByteBuffer;
 import java.text.NumberFormat;
 import java.util.Random;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
@@ -423,26 +424,26 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
       int hashCount) {
 
     int hash1 = hash.hash(buf, offset, length, 0);
-    int hash2 = hash.hash(buf, offset, length, hash1);
     int bloomBitSize = bloomSize << 3;
-
+
+    int hash2 = 0;
+    int compositeHash = 0;
+
     if (randomGeneratorForTest == null) {
-      // Production mode.
-      int compositeHash = hash1;
-      for (int i = 0; i < hashCount; i++) {
-        int hashLoc = Math.abs(compositeHash % bloomBitSize);
-        compositeHash += hash2;
-        if (!get(hashLoc, bloomBuf, bloomOffset)) {
-          return false;
-        }
-      }
-    } else {
-      // Test mode with "fake lookups" to estimate "ideal false positive rate".
-      for (int i = 0; i < hashCount; i++) {
-        int hashLoc = randomGeneratorForTest.nextInt(bloomBitSize);
-        if (!get(hashLoc, bloomBuf, bloomOffset)){
-          return false;
-        }
+      // Production mode
+      compositeHash = hash1;
+      hash2 = hash.hash(buf, offset, length, hash1);
+    }
+
+    for (int i = 0; i < hashCount; i++) {
+      int hashLoc = (randomGeneratorForTest == null
+          // Production mode
+          ? Math.abs(compositeHash % bloomBitSize)
+          // Test mode with "fake look-ups" to estimate "ideal false positive rate"
+          : randomGeneratorForTest.nextInt(bloomBitSize));
+      compositeHash += hash2;
+      if (!get(hashLoc, bloomBuf, bloomOffset)) {
+        return false;
       }
     }
     return true;
@@ -598,12 +599,17 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
     return bloom != null;
   }
 
-  public static void setFakeLookupMode(boolean enabled) {
-    if (enabled) {
-      randomGeneratorForTest = new Random(283742987L);
-    } else {
-      randomGeneratorForTest = null;
-    }
+  /**
+   * Sets a random generator to be used for look-ups instead of computing hashes. Can be used to
+   * simulate uniformity of accesses better in a test environment. Should not be set in a real
+   * environment where correctness matters!
+   *
+   * This gets used in {@link #contains(byte[], int, int, ByteBuffer, int, int, Hash, int)}
+   * @param random The random number source to use, or null to compute actual hashes
+   */
+  @VisibleForTesting
+  public static void setRandomGeneratorForTest(Random random) {
+    randomGeneratorForTest = random;
  }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/bfe373d1/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index 0129fad..a0bf6c5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
@@ -218,7 +218,9 @@ public class TestCompoundBloomFilter {
 /

[2/2] hbase git commit: HBASE-21168 BloomFilterUtil uses hardcoded randomness (Mike Drob)

2018-09-12 Thread apurtell
HBASE-21168 BloomFilterUtil uses hardcoded randomness (Mike Drob)

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0a2ebdce
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0a2ebdce
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0a2ebdce

Branch: refs/heads/branch-1
Commit: 0a2ebdce152fc077f9f219fc288fe50c52c92f9c
Parents: 4256d38
Author: Mingliang Liu 
Authored: Wed Sep 12 12:31:09 2018 -0700
Committer: Andrew Purtell 
Committed: Wed Sep 12 13:28:48 2018 -0700

--
 .../hadoop/hbase/util/ByteBloomFilter.java  | 54 +++-
 .../regionserver/TestCompoundBloomFilter.java   |  6 ++-
 2 files changed, 34 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0a2ebdce/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
index b8ec4c3..723571d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
@@ -26,6 +26,7 @@ import java.nio.ByteBuffer;
 import java.text.NumberFormat;
 import java.util.Random;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
@@ -423,26 +424,26 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
       int hashCount) {
 
     int hash1 = hash.hash(buf, offset, length, 0);
-    int hash2 = hash.hash(buf, offset, length, hash1);
     int bloomBitSize = bloomSize << 3;
-
+
+    int hash2 = 0;
+    int compositeHash = 0;
+
     if (randomGeneratorForTest == null) {
-      // Production mode.
-      int compositeHash = hash1;
-      for (int i = 0; i < hashCount; i++) {
-        int hashLoc = Math.abs(compositeHash % bloomBitSize);
-        compositeHash += hash2;
-        if (!get(hashLoc, bloomBuf, bloomOffset)) {
-          return false;
-        }
-      }
-    } else {
-      // Test mode with "fake lookups" to estimate "ideal false positive rate".
-      for (int i = 0; i < hashCount; i++) {
-        int hashLoc = randomGeneratorForTest.nextInt(bloomBitSize);
-        if (!get(hashLoc, bloomBuf, bloomOffset)){
-          return false;
-        }
+      // Production mode
+      compositeHash = hash1;
+      hash2 = hash.hash(buf, offset, length, hash1);
+    }
+
+    for (int i = 0; i < hashCount; i++) {
+      int hashLoc = (randomGeneratorForTest == null
+          // Production mode
+          ? Math.abs(compositeHash % bloomBitSize)
+          // Test mode with "fake look-ups" to estimate "ideal false positive rate"
+          : randomGeneratorForTest.nextInt(bloomBitSize));
+      compositeHash += hash2;
+      if (!get(hashLoc, bloomBuf, bloomOffset)) {
+        return false;
       }
     }
     return true;
@@ -598,12 +599,17 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
     return bloom != null;
   }
 
-  public static void setFakeLookupMode(boolean enabled) {
-    if (enabled) {
-      randomGeneratorForTest = new Random(283742987L);
-    } else {
-      randomGeneratorForTest = null;
-    }
+  /**
+   * Sets a random generator to be used for look-ups instead of computing hashes. Can be used to
+   * simulate uniformity of accesses better in a test environment. Should not be set in a real
+   * environment where correctness matters!
+   *
+   * This gets used in {@link #contains(byte[], int, int, ByteBuffer, int, int, Hash, int)}
+   * @param random The random number source to use, or null to compute actual hashes
+   */
+  @VisibleForTesting
+  public static void setRandomGeneratorForTest(Random random) {
+    randomGeneratorForTest = random;
  }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/0a2ebdce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index fd4d35a..066ad7f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
@@ -218,7 +218,9 @@ public class TestCompoundBloomFilter {
 // 

[1/2] hbase git commit: HBASE-21168 BloomFilterUtil uses hardcoded randomness (Mike Drob)

2018-09-12 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 4256d38ba -> 0a2ebdce1
  refs/heads/branch-1.4 4b5d2def3 -> 403a88385


HBASE-21168 BloomFilterUtil uses hardcoded randomness (Mike Drob)

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/403a8838
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/403a8838
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/403a8838

Branch: refs/heads/branch-1.4
Commit: 403a88385d27e699500f919f5f88e71196683186
Parents: 4b5d2de
Author: Mingliang Liu 
Authored: Wed Sep 12 12:31:09 2018 -0700
Committer: Andrew Purtell 
Committed: Wed Sep 12 13:28:40 2018 -0700

--
 .../hadoop/hbase/util/ByteBloomFilter.java  | 54 +++-
 .../regionserver/TestCompoundBloomFilter.java   |  6 ++-
 2 files changed, 34 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/403a8838/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
index b8ec4c3..723571d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
@@ -26,6 +26,7 @@ import java.nio.ByteBuffer;
 import java.text.NumberFormat;
 import java.util.Random;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
@@ -423,26 +424,26 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
       int hashCount) {
 
     int hash1 = hash.hash(buf, offset, length, 0);
-    int hash2 = hash.hash(buf, offset, length, hash1);
     int bloomBitSize = bloomSize << 3;
-
+
+    int hash2 = 0;
+    int compositeHash = 0;
+
     if (randomGeneratorForTest == null) {
-      // Production mode.
-      int compositeHash = hash1;
-      for (int i = 0; i < hashCount; i++) {
-        int hashLoc = Math.abs(compositeHash % bloomBitSize);
-        compositeHash += hash2;
-        if (!get(hashLoc, bloomBuf, bloomOffset)) {
-          return false;
-        }
-      }
-    } else {
-      // Test mode with "fake lookups" to estimate "ideal false positive rate".
-      for (int i = 0; i < hashCount; i++) {
-        int hashLoc = randomGeneratorForTest.nextInt(bloomBitSize);
-        if (!get(hashLoc, bloomBuf, bloomOffset)){
-          return false;
-        }
+      // Production mode
+      compositeHash = hash1;
+      hash2 = hash.hash(buf, offset, length, hash1);
+    }
+
+    for (int i = 0; i < hashCount; i++) {
+      int hashLoc = (randomGeneratorForTest == null
+          // Production mode
+          ? Math.abs(compositeHash % bloomBitSize)
+          // Test mode with "fake look-ups" to estimate "ideal false positive rate"
+          : randomGeneratorForTest.nextInt(bloomBitSize));
+      compositeHash += hash2;
+      if (!get(hashLoc, bloomBuf, bloomOffset)) {
+        return false;
       }
     }
     return true;
@@ -598,12 +599,17 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
     return bloom != null;
   }
 
-  public static void setFakeLookupMode(boolean enabled) {
-    if (enabled) {
-      randomGeneratorForTest = new Random(283742987L);
-    } else {
-      randomGeneratorForTest = null;
-    }
+  /**
+   * Sets a random generator to be used for look-ups instead of computing hashes. Can be used to
+   * simulate uniformity of accesses better in a test environment. Should not be set in a real
+   * environment where correctness matters!
+   *
+   * This gets used in {@link #contains(byte[], int, int, ByteBuffer, int, int, Hash, int)}
+   * @param random The random number source to use, or null to compute actual hashes
+   */
+  @VisibleForTesting
+  public static void setRandomGeneratorForTest(Random random) {
+    randomGeneratorForTest = random;
  }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/403a8838/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index fd4d35a..066ad7f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ b/hbase-server/src/test/java/o

[2/2] hbase git commit: HBASE-21168 BloomFilterUtil uses hardcoded randomness (Mike Drob)

2018-09-12 Thread apurtell
HBASE-21168 BloomFilterUtil uses hardcoded randomness (Mike Drob)

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/93bffa40
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/93bffa40
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/93bffa40

Branch: refs/heads/branch-1.2
Commit: 93bffa40d1dd3d2f7dbe8f31e540ef3970d6e446
Parents: d8790be
Author: Mingliang Liu 
Authored: Wed Sep 12 12:31:09 2018 -0700
Committer: Andrew Purtell 
Committed: Wed Sep 12 13:26:12 2018 -0700

--
 .../hadoop/hbase/util/ByteBloomFilter.java  | 54 +++-
 .../regionserver/TestCompoundBloomFilter.java   |  6 ++-
 2 files changed, 34 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/93bffa40/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
index b8ec4c3..723571d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
@@ -26,6 +26,7 @@ import java.nio.ByteBuffer;
 import java.text.NumberFormat;
 import java.util.Random;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
@@ -423,26 +424,26 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
   int hashCount) {
 
 int hash1 = hash.hash(buf, offset, length, 0);
-int hash2 = hash.hash(buf, offset, length, hash1);
 int bloomBitSize = bloomSize << 3;
-
+
+int hash2 = 0;
+int compositeHash = 0;
+
 if (randomGeneratorForTest == null) {
-  // Production mode.
-  int compositeHash = hash1;
-  for (int i = 0; i < hashCount; i++) {
-int hashLoc = Math.abs(compositeHash % bloomBitSize);
-compositeHash += hash2;
-if (!get(hashLoc, bloomBuf, bloomOffset)) {
-  return false;
-}
-  }
-} else {
-  // Test mode with "fake lookups" to estimate "ideal false positive rate".
-  for (int i = 0; i < hashCount; i++) {
-int hashLoc = randomGeneratorForTest.nextInt(bloomBitSize);
-if (!get(hashLoc, bloomBuf, bloomOffset)){
-  return false;
-}
+  // Production mode
+  compositeHash = hash1;
+  hash2 = hash.hash(buf, offset, length, hash1);
+}
+
+for (int i = 0; i < hashCount; i++) {
+  int hashLoc = (randomGeneratorForTest == null
+  // Production mode
+  ? Math.abs(compositeHash % bloomBitSize)
+  // Test mode with "fake look-ups" to estimate "ideal false positive rate"
+  : randomGeneratorForTest.nextInt(bloomBitSize));
+  compositeHash += hash2;
+  if (!get(hashLoc, bloomBuf, bloomOffset)) {
+return false;
   }
 }
 return true;
@@ -598,12 +599,17 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
 return bloom != null;
   }
 
-  public static void setFakeLookupMode(boolean enabled) {
-if (enabled) {
-  randomGeneratorForTest = new Random(283742987L);
-} else {
-  randomGeneratorForTest = null;
-}
+  /**
+   * Sets a random generator to be used for look-ups instead of computing hashes. Can be used to
+   * simulate uniformity of accesses better in a test environment. Should not be set in a real
+   * environment where correctness matters!
+   * <p>
+   * This gets used in {@link #contains(byte[], int, int, ByteBuffer, int, int, Hash, int)}
+   * @param random The random number source to use, or null to compute actual hashes
+   */
+  @VisibleForTesting
+  public static void setRandomGeneratorForTest(Random random) {
+randomGeneratorForTest = random;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/93bffa40/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index 0129fad..a0bf6c5 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
@@ -218,7 +218,9 @@ public class TestCompoundBloomFilter {
 //

[1/2] hbase git commit: HBASE-21168 BloomFilterUtil uses hardcoded randomness (Mike Drob)

2018-09-12 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 d8790bed5 -> 93bffa40d
  refs/heads/branch-1.3 b7dfb7462 -> 82f72b5be


HBASE-21168 BloomFilterUtil uses hardcoded randomness (Mike Drob)

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/82f72b5b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/82f72b5b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/82f72b5b

Branch: refs/heads/branch-1.3
Commit: 82f72b5bec678cb3e531747f530ccf076bd5115a
Parents: b7dfb74
Author: Mingliang Liu 
Authored: Wed Sep 12 12:31:09 2018 -0700
Committer: Andrew Purtell 
Committed: Wed Sep 12 13:26:09 2018 -0700

--
 .../hadoop/hbase/util/ByteBloomFilter.java  | 54 +++-
 .../regionserver/TestCompoundBloomFilter.java   |  6 ++-
 2 files changed, 34 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/82f72b5b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
index b8ec4c3..723571d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
@@ -26,6 +26,7 @@ import java.nio.ByteBuffer;
 import java.text.NumberFormat;
 import java.util.Random;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
@@ -423,26 +424,26 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
   int hashCount) {
 
 int hash1 = hash.hash(buf, offset, length, 0);
-int hash2 = hash.hash(buf, offset, length, hash1);
 int bloomBitSize = bloomSize << 3;
-
+
+int hash2 = 0;
+int compositeHash = 0;
+
 if (randomGeneratorForTest == null) {
-  // Production mode.
-  int compositeHash = hash1;
-  for (int i = 0; i < hashCount; i++) {
-int hashLoc = Math.abs(compositeHash % bloomBitSize);
-compositeHash += hash2;
-if (!get(hashLoc, bloomBuf, bloomOffset)) {
-  return false;
-}
-  }
-} else {
-  // Test mode with "fake lookups" to estimate "ideal false positive rate".
-  for (int i = 0; i < hashCount; i++) {
-int hashLoc = randomGeneratorForTest.nextInt(bloomBitSize);
-if (!get(hashLoc, bloomBuf, bloomOffset)){
-  return false;
-}
+  // Production mode
+  compositeHash = hash1;
+  hash2 = hash.hash(buf, offset, length, hash1);
+}
+
+for (int i = 0; i < hashCount; i++) {
+  int hashLoc = (randomGeneratorForTest == null
+  // Production mode
+  ? Math.abs(compositeHash % bloomBitSize)
+  // Test mode with "fake look-ups" to estimate "ideal false positive rate"
+  : randomGeneratorForTest.nextInt(bloomBitSize));
+  compositeHash += hash2;
+  if (!get(hashLoc, bloomBuf, bloomOffset)) {
+return false;
   }
 }
 return true;
@@ -598,12 +599,17 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
 return bloom != null;
   }
 
-  public static void setFakeLookupMode(boolean enabled) {
-if (enabled) {
-  randomGeneratorForTest = new Random(283742987L);
-} else {
-  randomGeneratorForTest = null;
-}
+  /**
+   * Sets a random generator to be used for look-ups instead of computing hashes. Can be used to
+   * simulate uniformity of accesses better in a test environment. Should not be set in a real
+   * environment where correctness matters!
+   * <p>
+   * This gets used in {@link #contains(byte[], int, int, ByteBuffer, int, int, Hash, int)}
+   * @param random The random number source to use, or null to compute actual hashes
+   */
+  @VisibleForTesting
+  public static void setRandomGeneratorForTest(Random random) {
+randomGeneratorForTest = random;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/82f72b5b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index 0129fad..a0bf6c5 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ 
b/hbase-server/src/test/java/

hbase git commit: HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store

2018-09-12 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 0075093d2 -> 76199a0a2


HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/76199a0a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/76199a0a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/76199a0a

Branch: refs/heads/master
Commit: 76199a0a29bfcde5b282f779f9d3b2036147722c
Parents: 0075093
Author: Michael Stack 
Authored: Wed Sep 12 10:13:43 2018 -0700
Committer: Michael Stack 
Committed: Wed Sep 12 10:21:26 2018 -0700

--
 .../hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java   | 1 -
 .../hbase/procedure2/store/wal/ProcedureWALFormatReader.java| 5 -
 2 files changed, 4 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/76199a0a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
index da8af84..ac3a529 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
@@ -83,7 +83,6 @@ public final class ProcedureWALFormat {
   // Ignore the last log which is current active log.
   while (logs.hasNext()) {
 ProcedureWALFile log = logs.next();
-LOG.debug("Loading WAL id={}", log.getLogId());
 log.open();
 try {
   reader.read(log);

http://git-wip-us.apache.org/repos/asf/hbase/blob/76199a0a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index d1deb18..4ab70f1 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -140,6 +140,7 @@ public class ProcedureWALFormatReader {
   LOG.info("Rebuilding tracker for " + log);
 }
 
+long count = 0;
 FSDataInputStream stream = log.getStream();
 try {
   boolean hasMore = true;
@@ -149,6 +150,7 @@ public class ProcedureWALFormatReader {
  LOG.warn("Nothing left to decode. Exiting with missing EOF, log=" + log);
   break;
 }
+count++;
 switch (entry.getType()) {
   case PROCEDURE_WAL_INIT:
 readInitEntry(entry);
@@ -170,8 +172,9 @@ public class ProcedureWALFormatReader {
throw new CorruptedWALProcedureStoreException("Invalid entry: " + entry);
 }
   }
+  LOG.info("Read {} entries in {}", count, log);
 } catch (InvalidProtocolBufferException e) {
-  LOG.error("While reading procedure from " + log, e);
+  LOG.error("While reading entry #{} in {}", count, log, e);
   loader.markCorruptedWAL(log, e);
 }
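A note on the new error line: SLF4J fills the {} placeholders with count and log, and treats the trailing Throwable as the exception to log with its stack trace, so the entry number, the file and the cause all appear together. A minimal sketch of the pattern (class name hypothetical):

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  class WalReadLogDemo {
    private static final Logger LOG = LoggerFactory.getLogger(WalReadLogDemo.class);

    void report(long count, String logName, Exception e) {
      // The two {} placeholders consume count and logName; the left-over
      // trailing Throwable is rendered as a stack trace, not as message text.
      LOG.error("While reading entry #{} in {}", count, logName, e);
    }
  }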
 



hbase git commit: HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store

2018-09-12 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 e9d23d5d2 -> 9d1319648


HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9d131964
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9d131964
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9d131964

Branch: refs/heads/branch-2
Commit: 9d1319648529f035c36733ca62975b24417d7f29
Parents: e9d23d5
Author: Michael Stack 
Authored: Wed Sep 12 10:13:43 2018 -0700
Committer: Michael Stack 
Committed: Wed Sep 12 10:21:12 2018 -0700

--
 .../hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java   | 1 -
 .../hbase/procedure2/store/wal/ProcedureWALFormatReader.java| 5 -
 2 files changed, 4 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9d131964/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
index da8af84..ac3a529 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
@@ -83,7 +83,6 @@ public final class ProcedureWALFormat {
   // Ignore the last log which is current active log.
   while (logs.hasNext()) {
 ProcedureWALFile log = logs.next();
-LOG.debug("Loading WAL id={}", log.getLogId());
 log.open();
 try {
   reader.read(log);

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d131964/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index d1deb18..4ab70f1 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -140,6 +140,7 @@ public class ProcedureWALFormatReader {
   LOG.info("Rebuilding tracker for " + log);
 }
 
+long count = 0;
 FSDataInputStream stream = log.getStream();
 try {
   boolean hasMore = true;
@@ -149,6 +150,7 @@ public class ProcedureWALFormatReader {
  LOG.warn("Nothing left to decode. Exiting with missing EOF, log=" + log);
   break;
 }
+count++;
 switch (entry.getType()) {
   case PROCEDURE_WAL_INIT:
 readInitEntry(entry);
@@ -170,8 +172,9 @@ public class ProcedureWALFormatReader {
throw new CorruptedWALProcedureStoreException("Invalid entry: " + entry);
 }
   }
+  LOG.info("Read {} entries in {}", count, log);
 } catch (InvalidProtocolBufferException e) {
-  LOG.error("While reading procedure from " + log, e);
+  LOG.error("While reading entry #{} in {}", count, log, e);
   loader.markCorruptedWAL(log, e);
 }
 



hbase git commit: HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store

2018-09-12 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 0476f4e4b -> 739d7256e


HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/739d7256
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/739d7256
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/739d7256

Branch: refs/heads/branch-2.0
Commit: 739d7256ed988674f85dc4684ef109f77c599f77
Parents: 0476f4e
Author: Michael Stack 
Authored: Wed Sep 12 10:13:43 2018 -0700
Committer: Michael Stack 
Committed: Wed Sep 12 10:20:45 2018 -0700

--
 .../hbase/procedure2/store/wal/ProcedureWALFormatReader.java| 5 -
 1 file changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/739d7256/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index d1deb18..4ab70f1 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -140,6 +140,7 @@ public class ProcedureWALFormatReader {
   LOG.info("Rebuilding tracker for " + log);
 }
 
+long count = 0;
 FSDataInputStream stream = log.getStream();
 try {
   boolean hasMore = true;
@@ -149,6 +150,7 @@ public class ProcedureWALFormatReader {
  LOG.warn("Nothing left to decode. Exiting with missing EOF, log=" + log);
   break;
 }
+count++;
 switch (entry.getType()) {
   case PROCEDURE_WAL_INIT:
 readInitEntry(entry);
@@ -170,8 +172,9 @@ public class ProcedureWALFormatReader {
throw new CorruptedWALProcedureStoreException("Invalid entry: " + entry);
 }
   }
+  LOG.info("Read {} entries in {}", count, log);
 } catch (InvalidProtocolBufferException e) {
-  LOG.error("While reading procedure from " + log, e);
+  LOG.error("While reading entry #{} in {}", count, log, e);
   loader.markCorruptedWAL(log, e);
 }
 



hbase git commit: HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store

2018-09-12 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 d81e80671 -> 487f713c6


HBASE-21190 Log files and count of entries in each as we load from the MasterProcWAL store


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/487f713c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/487f713c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/487f713c

Branch: refs/heads/branch-2.1
Commit: 487f713c632e1a8afed7cae5d2ccaf770386bc1c
Parents: d81e806
Author: Michael Stack 
Authored: Wed Sep 12 10:13:43 2018 -0700
Committer: Michael Stack 
Committed: Wed Sep 12 10:19:46 2018 -0700

--
 .../hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java   | 1 -
 .../hbase/procedure2/store/wal/ProcedureWALFormatReader.java| 5 -
 2 files changed, 4 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/487f713c/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
index da8af84..ac3a529 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
@@ -83,7 +83,6 @@ public final class ProcedureWALFormat {
   // Ignore the last log which is current active log.
   while (logs.hasNext()) {
 ProcedureWALFile log = logs.next();
-LOG.debug("Loading WAL id={}", log.getLogId());
 log.open();
 try {
   reader.read(log);

http://git-wip-us.apache.org/repos/asf/hbase/blob/487f713c/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index d1deb18..4ab70f1 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -140,6 +140,7 @@ public class ProcedureWALFormatReader {
   LOG.info("Rebuilding tracker for " + log);
 }
 
+long count = 0;
 FSDataInputStream stream = log.getStream();
 try {
   boolean hasMore = true;
@@ -149,6 +150,7 @@ public class ProcedureWALFormatReader {
  LOG.warn("Nothing left to decode. Exiting with missing EOF, log=" + log);
   break;
 }
+count++;
 switch (entry.getType()) {
   case PROCEDURE_WAL_INIT:
 readInitEntry(entry);
@@ -170,8 +172,9 @@ public class ProcedureWALFormatReader {
throw new CorruptedWALProcedureStoreException("Invalid entry: " + entry);
 }
   }
+  LOG.info("Read {} entries in {}", count, log);
 } catch (InvalidProtocolBufferException e) {
-  LOG.error("While reading procedure from " + log, e);
+  LOG.error("While reading entry #{} in {}", count, log, e);
   loader.markCorruptedWAL(log, e);
 }
 



[1/2] hbase git commit: HBASE-21174 [REST] Failed to parse empty qualifier in TableResource#getScanResource

2018-09-12 Thread gxcheng
Repository: hbase
Updated Branches:
  refs/heads/branch-1 158607bf2 -> 4256d38ba
  refs/heads/branch-1.4 aa28cf7e9 -> 4b5d2def3


HBASE-21174 [REST] Failed to parse empty qualifier in TableResource#getScanResource

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4256d38b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4256d38b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4256d38b

Branch: refs/heads/branch-1
Commit: 4256d38bac0b02946f269d6332449dd87f522578
Parents: 158607b
Author: Guangxu Cheng 
Authored: Wed Sep 12 22:51:03 2018 +0800
Committer: Guangxu Cheng 
Committed: Wed Sep 12 23:00:34 2018 +0800

--
 .../apache/hadoop/hbase/rest/TableResource.java | 37 ++---
 .../apache/hadoop/hbase/rest/TestTableScan.java | 55 +---
 2 files changed, 64 insertions(+), 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4256d38b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
--
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index a1a60bb..b208c96 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -24,16 +24,14 @@ import java.util.List;
 
 import javax.ws.rs.DefaultValue;
 import javax.ws.rs.Encoded;
-import javax.ws.rs.HeaderParam;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.UriInfo;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Scan;
@@ -129,7 +127,7 @@ public class TableResource extends ResourceBase {
   @QueryParam(Constants.SCAN_LIMIT) int userRequestedLimit,
   @DefaultValue("") @QueryParam(Constants.SCAN_START_ROW) String startRow,
   @DefaultValue("") @QueryParam(Constants.SCAN_END_ROW) String endRow,
-  @DefaultValue("") @QueryParam(Constants.SCAN_COLUMN) List<String> column,
+  @QueryParam(Constants.SCAN_COLUMN) List<String> column,
  @DefaultValue("1") @QueryParam(Constants.SCAN_MAX_VERSIONS) int maxVersions,
   @DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize,
   @DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime,
@@ -162,26 +160,21 @@ public class TableResource extends ResourceBase {
 tableScan.setStartRow(Bytes.toBytes(startRow));
   }
   tableScan.setStopRow(Bytes.toBytes(endRow));
-  for (String csplit : column) {
-String[] familysplit = csplit.trim().split(":");
-if (familysplit.length == 2) {
-  if (familysplit[1].length() > 0) {
-if (LOG.isTraceEnabled()) {
-  LOG.trace("Scan family and column : " + familysplit[0] + "  " + familysplit[1]);
-}
-tableScan.addColumn(Bytes.toBytes(familysplit[0]), Bytes.toBytes(familysplit[1]));
-  } else {
-tableScan.addFamily(Bytes.toBytes(familysplit[0]));
-if (LOG.isTraceEnabled()) {
-  LOG.trace("Scan family : " + familysplit[0] + " and empty qualifier.");
-}
-tableScan.addColumn(Bytes.toBytes(familysplit[0]), null);
+  for (String col : column) {
+byte [][] parts = KeyValue.parseColumn(Bytes.toBytes(col.trim()));
+if (parts.length == 1) {
+  if (LOG.isTraceEnabled()) {
+LOG.trace("Scan family : " + Bytes.toStringBinary(parts[0]));
   }
-} else if (StringUtils.isNotEmpty(familysplit[0])) {
+  tableScan.addFamily(parts[0]);
+} else if (parts.length == 2) {
   if (LOG.isTraceEnabled()) {
-LOG.trace("Scan family : " + familysplit[0]);
+LOG.trace("Scan family and column : " + Bytes.toStringBinary(parts[0])
++ "  " + Bytes.toStringBinary(parts[1]));
   }
-  tableScan.addFamily(Bytes.toBytes(familysplit[0]));
+  tableScan.addColumn(parts[0], parts[1]);
+} else {
+  throw new IllegalArgumentException("Invalid column specifier.");
 }
   }
 
@@ -205,7 +198,7 @@ public class TableResource extends ResourceBase {
   tableScan.setCaching(fetchSize);
   tableScan.setReversed(reversed);
   tableScan.setCacheBlocks(cacheBlocks);
- return new TableScanResource(hTable.getScanner(tableScan), 
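The user-visible effect of the patch above: a scan whose column query parameter names a family with an explicit empty qualifier, e.g. something like GET /mytable/*?column=cf: (table and family names invented for illustration), is now accepted instead of failing to parse, and bare families and family:qualifier pairs can be mixed as repeated column parameters.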

[2/2] hbase git commit: HBASE-21174 [REST] Failed to parse empty qualifier in TableResource#getScanResource

2018-09-12 Thread gxcheng
HBASE-21174 [REST] Failed to parse empty qualifier in TableResource#getScanResource

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4b5d2def
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4b5d2def
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4b5d2def

Branch: refs/heads/branch-1.4
Commit: 4b5d2def398abf7d8e02fe3fe1387e107272ea49
Parents: aa28cf7
Author: Guangxu Cheng 
Authored: Wed Sep 12 22:51:03 2018 +0800
Committer: Guangxu Cheng 
Committed: Wed Sep 12 23:01:13 2018 +0800

--
 .../apache/hadoop/hbase/rest/TableResource.java | 37 ++---
 .../apache/hadoop/hbase/rest/TestTableScan.java | 55 +---
 2 files changed, 64 insertions(+), 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4b5d2def/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
--
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index a1a60bb..b208c96 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -24,16 +24,14 @@ import java.util.List;
 
 import javax.ws.rs.DefaultValue;
 import javax.ws.rs.Encoded;
-import javax.ws.rs.HeaderParam;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.UriInfo;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Scan;
@@ -129,7 +127,7 @@ public class TableResource extends ResourceBase {
   @QueryParam(Constants.SCAN_LIMIT) int userRequestedLimit,
   @DefaultValue("") @QueryParam(Constants.SCAN_START_ROW) String startRow,
   @DefaultValue("") @QueryParam(Constants.SCAN_END_ROW) String endRow,
-  @DefaultValue("") @QueryParam(Constants.SCAN_COLUMN) List<String> column,
+  @QueryParam(Constants.SCAN_COLUMN) List<String> column,
  @DefaultValue("1") @QueryParam(Constants.SCAN_MAX_VERSIONS) int maxVersions,
   @DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize,
   @DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime,
@@ -162,26 +160,21 @@ public class TableResource extends ResourceBase {
 tableScan.setStartRow(Bytes.toBytes(startRow));
   }
   tableScan.setStopRow(Bytes.toBytes(endRow));
-  for (String csplit : column) {
-String[] familysplit = csplit.trim().split(":");
-if (familysplit.length == 2) {
-  if (familysplit[1].length() > 0) {
-if (LOG.isTraceEnabled()) {
-  LOG.trace("Scan family and column : " + familysplit[0] + "  " + familysplit[1]);
-}
-tableScan.addColumn(Bytes.toBytes(familysplit[0]), Bytes.toBytes(familysplit[1]));
-  } else {
-tableScan.addFamily(Bytes.toBytes(familysplit[0]));
-if (LOG.isTraceEnabled()) {
-  LOG.trace("Scan family : " + familysplit[0] + " and empty qualifier.");
-}
-tableScan.addColumn(Bytes.toBytes(familysplit[0]), null);
+  for (String col : column) {
+byte [][] parts = KeyValue.parseColumn(Bytes.toBytes(col.trim()));
+if (parts.length == 1) {
+  if (LOG.isTraceEnabled()) {
+LOG.trace("Scan family : " + Bytes.toStringBinary(parts[0]));
   }
-} else if (StringUtils.isNotEmpty(familysplit[0])) {
+  tableScan.addFamily(parts[0]);
+} else if (parts.length == 2) {
   if (LOG.isTraceEnabled()) {
-LOG.trace("Scan family : " + familysplit[0]);
+LOG.trace("Scan family and column : " + Bytes.toStringBinary(parts[0])
++ "  " + Bytes.toStringBinary(parts[1]));
   }
-  tableScan.addFamily(Bytes.toBytes(familysplit[0]));
+  tableScan.addColumn(parts[0], parts[1]);
+} else {
+  throw new IllegalArgumentException("Invalid column specifier.");
 }
   }
 
@@ -205,7 +198,7 @@ public class TableResource extends ResourceBase {
   tableScan.setCaching(fetchSize);
   tableScan.setReversed(reversed);
   tableScan.setCacheBlocks(cacheBlocks);
- return new TableScanResource(hTable.getScanner(tableScan), userRequestedLimit);
+  return new TableScanResource(hTable.getScanner(tableScan), userRequestedLimit);
 } catch (Exce
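For reference, KeyValue.parseColumn splits a specifier on the first ':' and returns one element for a bare family, or two elements otherwise, with an empty second element for a trailing ':'; this is the contract the rewritten loop relies on. A small illustration (the lengths in the comments are our reading of that contract, not archive output):

  import org.apache.hadoop.hbase.KeyValue;
  import org.apache.hadoop.hbase.util.Bytes;

  class ParseColumnDemo {
    public static void main(String[] args) {
      byte[][] family    = KeyValue.parseColumn(Bytes.toBytes("cf"));      // length 1
      byte[][] emptyQual = KeyValue.parseColumn(Bytes.toBytes("cf:"));     // length 2, parts[1] empty
      byte[][] fullQual  = KeyValue.parseColumn(Bytes.toBytes("cf:qual")); // length 2
      // Anything that parses to neither 1 nor 2 parts is rejected by the new code.
      System.out.println(family.length + " " + emptyQual.length + " " + fullQual.length); // 1 2 2
    }
  }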

[36/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.LeaseRecovery.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.LeaseRecovery.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.LeaseRecovery.html
index df746ae..d27bef3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.LeaseRecovery.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.LeaseRecovery.html
[The hunk is the regenerated line-numbered HTML source view of WALProcedureStore.java (constructor, start() and stop() methods); truncated.]

[23/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.ManifestBuilder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.ManifestBuilder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.ManifestBuilder.html
index 5034654..f02fb1d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.ManifestBuilder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.ManifestBuilder.html
[The hunk is the regenerated line-numbered HTML source view of SnapshotManifestV1.ManifestBuilder; the visible change replaces the builder's single FileSystem field with separate rootFs and workingDirFs fields; truncated.]

[3/3] hbase git commit: HBASE-21168 Insecure Randomness in BloomFilterUtil

2018-09-12 Thread mdrob
HBASE-21168 Insecure Randomness in BloomFilterUtil

Flagged by Fortify static analysis

Signed-off-by: Andrew Purtell 
Signed-off-by: Mingliang Liu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d81e8067
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d81e8067
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d81e8067

Branch: refs/heads/branch-2.1
Commit: d81e80671874e42901f9fb166bae814530a9e98e
Parents: 2da6dbe
Author: Mike Drob 
Authored: Fri Sep 7 10:28:30 2018 -0500
Committer: Mike Drob 
Committed: Wed Sep 12 09:52:41 2018 -0500

--
 .../hadoop/hbase/util/BloomFilterUtil.java  | 55 +++-
 .../regionserver/TestCompoundBloomFilter.java   |  6 ++-
 2 files changed, 35 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d81e8067/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
index b1b3bcc..33bea7a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
@@ -21,9 +21,11 @@ import java.text.NumberFormat;
 import java.util.Random;
 
 import org.apache.hadoop.hbase.Cell;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 
 /**
  * Utility methods related to BloomFilters
@@ -75,12 +77,17 @@ public final class BloomFilterUtil {
 return (long) Math.ceil(maxKeys * (-Math.log(errorRate) / LOG2_SQUARED));
   }
 
-  public static void setFakeLookupMode(boolean enabled) {
-if (enabled) {
-  randomGeneratorForTest = new Random(283742987L);
-} else {
-  randomGeneratorForTest = null;
-}
+  /**
+   * Sets a random generator to be used for look-ups instead of computing hashes. Can be used to
+   * simulate uniformity of accesses better in a test environment. Should not be set in a real
+   * environment where correctness matters!
+   * <p>
+   * This gets used in {@link #contains(ByteBuff, int, int, Hash, int, HashKey)}
+   * @param random The random number source to use, or null to compute actual hashes
+   */
+  @VisibleForTesting
+  public static void setRandomGeneratorForTest(Random random) {
+randomGeneratorForTest = random;
   }
 
   /**
@@ -205,26 +212,26 @@ public final class BloomFilterUtil {
  private static <T> boolean contains(ByteBuff bloomBuf, int bloomOffset, int bloomSize, Hash hash,
      int hashCount, HashKey<T> hashKey) {
 int hash1 = hash.hash(hashKey, 0);
-int hash2 = hash.hash(hashKey, hash1);
 int bloomBitSize = bloomSize << 3;
 
+int hash2 = 0;
+int compositeHash = 0;
+
 if (randomGeneratorForTest == null) {
-  // Production mode.
-  int compositeHash = hash1;
-  for (int i = 0; i < hashCount; i++) {
-int hashLoc = Math.abs(compositeHash % bloomBitSize);
-compositeHash += hash2;
-if (!checkBit(hashLoc, bloomBuf, bloomOffset)) {
-  return false;
-}
-  }
-} else {
-  // Test mode with "fake lookups" to estimate "ideal false positive rate".
-  for (int i = 0; i < hashCount; i++) {
-int hashLoc = randomGeneratorForTest.nextInt(bloomBitSize);
-if (!checkBit(hashLoc, bloomBuf, bloomOffset)){
-  return false;
-}
+  // Production mode
+  compositeHash = hash1;
+  hash2 = hash.hash(hashKey, hash1);
+}
+
+for (int i = 0; i < hashCount; i++) {
+  int hashLoc = (randomGeneratorForTest == null
+  // Production mode
+  ? Math.abs(compositeHash % bloomBitSize)
+  // Test mode with "fake look-ups" to estimate "ideal false positive rate"
+  : randomGeneratorForTest.nextInt(bloomBitSize));
+  compositeHash += hash2;
+  if (!checkBit(hashLoc, bloomBuf, bloomOffset)) {
+return false;
   }
 }
 return true;

http://git-wip-us.apache.org/repos/asf/hbase/blob/d81e8067/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index 0b17d28..424a788 100644
--- 
a/hbase-server/src/test/java/o
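An aside on the unchanged context line returning Math.ceil(maxKeys * (-Math.log(errorRate) / LOG2_SQUARED)): assuming LOG2_SQUARED is (ln 2)^2, as the name suggests, this is the textbook optimal Bloom filter size m = ceil(-n * ln(p) / (ln 2)^2). For example, n = 1,000,000 keys at errorRate p = 0.01 gives m = ceil(1,000,000 * 4.60517 / 0.480453) = 9,585,059 bits, roughly 1.14 MiB.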

[2/3] hbase git commit: HBASE-21168 Insecure Randomness in BloomFilterUtil

2018-09-12 Thread mdrob
HBASE-21168 Insecure Randomness in BloomFilterUtil

Flagged by Fortify static analysis

Signed-off-by: Andrew Purtell 
Signed-off-by: Mingliang Liu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e9d23d5d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e9d23d5d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e9d23d5d

Branch: refs/heads/branch-2
Commit: e9d23d5d25571f4bf682f53303c40e4a18284e66
Parents: e1548d3
Author: Mike Drob 
Authored: Fri Sep 7 10:28:30 2018 -0500
Committer: Mike Drob 
Committed: Wed Sep 12 09:51:57 2018 -0500

--
 .../hadoop/hbase/util/BloomFilterUtil.java  | 55 +++-
 .../regionserver/TestCompoundBloomFilter.java   |  6 ++-
 2 files changed, 35 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e9d23d5d/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
index b1b3bcc..33bea7a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
@@ -21,9 +21,11 @@ import java.text.NumberFormat;
 import java.util.Random;
 
 import org.apache.hadoop.hbase.Cell;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 
 /**
  * Utility methods related to BloomFilters
@@ -75,12 +77,17 @@ public final class BloomFilterUtil {
 return (long) Math.ceil(maxKeys * (-Math.log(errorRate) / LOG2_SQUARED));
   }
 
-  public static void setFakeLookupMode(boolean enabled) {
-if (enabled) {
-  randomGeneratorForTest = new Random(283742987L);
-} else {
-  randomGeneratorForTest = null;
-}
+  /**
+   * Sets a random generator to be used for look-ups instead of computing hashes. Can be used to
+   * simulate uniformity of accesses better in a test environment. Should not be set in a real
+   * environment where correctness matters!
+   * <p>
+   * This gets used in {@link #contains(ByteBuff, int, int, Hash, int, HashKey)}
+   * @param random The random number source to use, or null to compute actual hashes
+   */
+  @VisibleForTesting
+  public static void setRandomGeneratorForTest(Random random) {
+randomGeneratorForTest = random;
   }
 
   /**
@@ -205,26 +212,26 @@ public final class BloomFilterUtil {
  private static <T> boolean contains(ByteBuff bloomBuf, int bloomOffset, int bloomSize, Hash hash,
      int hashCount, HashKey<T> hashKey) {
 int hash1 = hash.hash(hashKey, 0);
-int hash2 = hash.hash(hashKey, hash1);
 int bloomBitSize = bloomSize << 3;
 
+int hash2 = 0;
+int compositeHash = 0;
+
 if (randomGeneratorForTest == null) {
-  // Production mode.
-  int compositeHash = hash1;
-  for (int i = 0; i < hashCount; i++) {
-int hashLoc = Math.abs(compositeHash % bloomBitSize);
-compositeHash += hash2;
-if (!checkBit(hashLoc, bloomBuf, bloomOffset)) {
-  return false;
-}
-  }
-} else {
-  // Test mode with "fake lookups" to estimate "ideal false positive rate".
-  for (int i = 0; i < hashCount; i++) {
-int hashLoc = randomGeneratorForTest.nextInt(bloomBitSize);
-if (!checkBit(hashLoc, bloomBuf, bloomOffset)){
-  return false;
-}
+  // Production mode
+  compositeHash = hash1;
+  hash2 = hash.hash(hashKey, hash1);
+}
+
+for (int i = 0; i < hashCount; i++) {
+  int hashLoc = (randomGeneratorForTest == null
+  // Production mode
+  ? Math.abs(compositeHash % bloomBitSize)
+  // Test mode with "fake look-ups" to estimate "ideal false positive rate"
+  : randomGeneratorForTest.nextInt(bloomBitSize));
+  compositeHash += hash2;
+  if (!checkBit(hashLoc, bloomBuf, bloomOffset)) {
+return false;
   }
 }
 return true;

http://git-wip-us.apache.org/repos/asf/hbase/blob/e9d23d5d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index 0b17d28..424a788 100644
--- 
a/hbase-server/src/test/java/org

[1/3] hbase git commit: HBASE-21168 Insecure Randomness in BloomFilterUtil

2018-09-12 Thread mdrob
Repository: hbase
Updated Branches:
  refs/heads/branch-2 e1548d334 -> e9d23d5d2
  refs/heads/branch-2.1 2da6dbe56 -> d81e80671
  refs/heads/master 3810ba2c6 -> 0075093d2


HBASE-21168 Insecure Randomness in BloomFilterUtil

Flagged by Fortify static analysis

Signed-off-by: Andrew Purtell 
Signed-off-by: Mingliang Liu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0075093d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0075093d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0075093d

Branch: refs/heads/master
Commit: 0075093d21c3534b7e39da2f359b0815038bd378
Parents: 3810ba2
Author: Mike Drob 
Authored: Fri Sep 7 10:28:30 2018 -0500
Committer: Mike Drob 
Committed: Wed Sep 12 09:51:45 2018 -0500

--
 .../hadoop/hbase/util/BloomFilterUtil.java  | 55 +++-
 .../regionserver/TestCompoundBloomFilter.java   |  6 ++-
 2 files changed, 35 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0075093d/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
index b1b3bcc..33bea7a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
@@ -21,9 +21,11 @@ import java.text.NumberFormat;
 import java.util.Random;
 
 import org.apache.hadoop.hbase.Cell;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 
 /**
  * Utility methods related to BloomFilters
@@ -75,12 +77,17 @@ public final class BloomFilterUtil {
 return (long) Math.ceil(maxKeys * (-Math.log(errorRate) / LOG2_SQUARED));
   }
 
-  public static void setFakeLookupMode(boolean enabled) {
-if (enabled) {
-  randomGeneratorForTest = new Random(283742987L);
-} else {
-  randomGeneratorForTest = null;
-}
+  /**
+   * Sets a random generator to be used for look-ups instead of computing hashes. Can be used to
+   * simulate uniformity of accesses better in a test environment. Should not be set in a real
+   * environment where correctness matters!
+   * <p>
+   * This gets used in {@link #contains(ByteBuff, int, int, Hash, int, HashKey)}
+   * @param random The random number source to use, or null to compute actual hashes
+   */
+  @VisibleForTesting
+  public static void setRandomGeneratorForTest(Random random) {
+randomGeneratorForTest = random;
   }
 
   /**
@@ -205,26 +212,26 @@ public final class BloomFilterUtil {
  private static <T> boolean contains(ByteBuff bloomBuf, int bloomOffset, int bloomSize, Hash hash,
      int hashCount, HashKey<T> hashKey) {
 int hash1 = hash.hash(hashKey, 0);
-int hash2 = hash.hash(hashKey, hash1);
 int bloomBitSize = bloomSize << 3;
 
+int hash2 = 0;
+int compositeHash = 0;
+
 if (randomGeneratorForTest == null) {
-  // Production mode.
-  int compositeHash = hash1;
-  for (int i = 0; i < hashCount; i++) {
-int hashLoc = Math.abs(compositeHash % bloomBitSize);
-compositeHash += hash2;
-if (!checkBit(hashLoc, bloomBuf, bloomOffset)) {
-  return false;
-}
-  }
-} else {
-  // Test mode with "fake lookups" to estimate "ideal false positive rate".
-  for (int i = 0; i < hashCount; i++) {
-int hashLoc = randomGeneratorForTest.nextInt(bloomBitSize);
-if (!checkBit(hashLoc, bloomBuf, bloomOffset)){
-  return false;
-}
+  // Production mode
+  compositeHash = hash1;
+  hash2 = hash.hash(hashKey, hash1);
+}
+
+for (int i = 0; i < hashCount; i++) {
+  int hashLoc = (randomGeneratorForTest == null
+  // Production mode
+  ? Math.abs(compositeHash % bloomBitSize)
+  // Test mode with "fake look-ups" to estimate "ideal false positive rate"
+  : randomGeneratorForTest.nextInt(bloomBitSize));
+  compositeHash += hash2;
+  if (!checkBit(hashLoc, bloomBuf, bloomOffset)) {
+return false;
   }
 }
 return true;

http://git-wip-us.apache.org/repos/asf/hbase/blob/0075093d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilte
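A sketch of how a test would drive the new hook in place of the removed setFakeLookupMode(true)/setFakeLookupMode(false) toggle; the seed is the one that was hard-coded in the removed method, and the wrapper class is hypothetical:

  import java.util.Random;
  import org.apache.hadoop.hbase.util.BloomFilterUtil;

  class FakeLookupScope {
    static void runWithFakeLookups(Runnable body) {
      // Equivalent of the old setFakeLookupMode(true): seeded for reproducibility.
      BloomFilterUtil.setRandomGeneratorForTest(new Random(283742987L));
      try {
        body.run();
      } finally {
        // Equivalent of setFakeLookupMode(false): restore real hash computation.
        BloomFilterUtil.setRandomGeneratorForTest(null);
      }
    }
  }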

[45/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index cfa9873..a54afb5 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
[checkstyle.rss: overall file count 3735 -> 3739 and error count 15232 -> 15225; new zero-error entries for TestSnapshotTemporaryDirectoryWithRegionReplicas, TestSnapshotDFSTemporaryDirectory, TestExportSnapshotWithTemporaryDirectory and TestSnapshotTemporaryDirectory; two per-file error counts drop from 2 to 1 and from 18 to 12. coc.html: "Last Published" bumped from 2018-09-11 to 2018-09-12. dependencies.html: diff truncated.]

[29/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
index 7d49582..01861a7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
@@ -980,175 +980,177 @@
 972FileSystem outputFs = 
FileSystem.get(outputRoot.toUri(), destConf);
 973LOG.debug("outputFs=" + 
outputFs.getUri().toString() + " outputRoot=" + outputRoot.toString());
 974
-975boolean skipTmp = 
conf.getBoolean(CONF_SKIP_TMP, false);
-976
-977Path snapshotDir = 
SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, inputRoot);
-978Path snapshotTmpDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(targetName, outputRoot);
-979Path outputSnapshotDir = 
SnapshotDescriptionUtils.getCompletedSnapshotDir(targetName, outputRoot);
-980Path initialOutputSnapshotDir = 
skipTmp ? outputSnapshotDir : snapshotTmpDir;
-981
-982// Find the necessary directory which 
need to change owner and group
-983Path needSetOwnerDir = 
SnapshotDescriptionUtils.getSnapshotRootDir(outputRoot);
-984if (outputFs.exists(needSetOwnerDir)) 
{
-985  if (skipTmp) {
-986needSetOwnerDir = 
outputSnapshotDir;
-987  } else {
-988needSetOwnerDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(outputRoot);
-989if 
(outputFs.exists(needSetOwnerDir)) {
-990  needSetOwnerDir = 
snapshotTmpDir;
-991}
-992  }
-993}
-994
-995// Check if the snapshot already 
exists
-996if 
(outputFs.exists(outputSnapshotDir)) {
-997  if (overwrite) {
-998if 
(!outputFs.delete(outputSnapshotDir, true)) {
-999  System.err.println("Unable to 
remove existing snapshot directory: " + outputSnapshotDir);
-1000  return 1;
-1001}
-1002  } else {
-1003System.err.println("The snapshot 
'" + targetName +
-1004  "' already exists in the 
destination: " + outputSnapshotDir);
-1005return 1;
-1006  }
-1007}
-1008
-1009if (!skipTmp) {
-1010  // Check if the snapshot already 
in-progress
-1011  if 
(outputFs.exists(snapshotTmpDir)) {
-1012if (overwrite) {
-1013  if 
(!outputFs.delete(snapshotTmpDir, true)) {
-1014System.err.println("Unable 
to remove existing snapshot tmp directory: "+snapshotTmpDir);
-1015return 1;
-1016  }
-1017} else {
-1018  System.err.println("A snapshot 
with the same name '"+ targetName +"' may be in-progress");
-1019  System.err.println("Please 
check "+snapshotTmpDir+". If the snapshot has completed, ");
-1020  System.err.println("consider 
removing "+snapshotTmpDir+" by using the -overwrite option");
-1021  return 1;
-1022}
-1023  }
-1024}
-1025
-1026// Step 1 - Copy 
fs1:/.snapshot/ to  fs2:/.snapshot/.tmp/
-1027// The snapshot references must be 
copied before the hfiles otherwise the cleaner
-1028// will remove them because they are 
unreferenced.
-1029List travesedPaths = new 
ArrayList<>();
-1030boolean copySucceeded = false;
-1031try {
-1032  LOG.info("Copy Snapshot Manifest 
from " + snapshotDir + " to " + initialOutputSnapshotDir);
-1033  travesedPaths =
-1034  
FSUtils.copyFilesParallel(inputFs, snapshotDir, outputFs, 
initialOutputSnapshotDir, conf,
-1035  
conf.getInt(CONF_COPY_MANIFEST_THREADS, DEFAULT_COPY_MANIFEST_THREADS));
-1036  copySucceeded = true;
-1037} catch (IOException e) {
-1038  throw new 
ExportSnapshotException("Failed to copy the snapshot directory: from=" +
-1039snapshotDir + " to=" + 
initialOutputSnapshotDir, e);
-1040} finally {
-1041  if (copySucceeded) {
-1042if (filesUser != null || 
filesGroup != null) {
-1043  LOG.warn((filesUser == null ? 
"" : "Change the owner of " + needSetOwnerDir + " to "
-1044  + filesUser)
-1045  + (filesGroup == null ? "" 
: ", Change the group of " + needSetOwnerDir + " to "
-1046  + filesGroup));
-1047  setOwnerParallel(outputFs, 
filesUser, filesGroup, conf, travesedPaths);
-1048}
-1049if (filesMode > 0) {
-1050  LOG.warn("Change the 
permission of " + needSetOwnerDir + " to " + filesMode);
-1051  
setPermissionParallel(outputFs, (short)filesMode, travesedPaths, conf);
-1052}
-1053  }
-1054}
-1055
-1056// Write a new .snapshotinfo if the 
target name is different from
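
A minimal sketch of the destination choice made in the copy step above, assuming the CONF_SKIP_TMP key read there is the literal "snapshot.export.skiptmp" (an assumption; the constant's value is not shown in this hunk) and using illustrative directory names rather than the utility's exact layout:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public final class SkipTmpSketch {
  // With skiptmp set, the manifest is copied straight into the completed
  // snapshot dir; otherwise it is staged under .tmp and promoted only once
  // the copy succeeds.
  static Path chooseInitialOutputDir(Configuration conf, Path outputRoot, String targetName) {
    boolean skipTmp = conf.getBoolean("snapshot.export.skiptmp", false); // assumed key
    Path completedDir = new Path(outputRoot, ".hbase-snapshot/" + targetName);
    Path tmpDir = new Path(outputRoot, ".hbase-snapshot/.tmp/" + targetName);
    return skipTmp ? completedDir : tmpDir;
  }
}

The manifest-before-hfiles ordering called out in the hunk's comment matters because the snapshot cleaner treats unreferenced hfiles as garbage and would delete them mid-copy.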

[43/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html 
b/devapidocs/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
index 1aaae9e..65251a8 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public abstract class TakeSnapshotHandler
+public abstract class TakeSnapshotHandler
 extends EventHandler
 implements SnapshotSentinel, ForeignExceptionSnare
 A handler for taking snapshots from the master.
@@ -158,33 +158,33 @@ implements finished 
 
 
-protected 
org.apache.hadoop.fs.FileSystem
-fs 
-
-
 protected TableDescriptor
 htd 
 
-
+
 private static org.slf4j.Logger
 LOG 
 
-
+
 protected MasterServices
 master 
 
-
+
 protected MetricsSnapshot
 metricsSnapshot 
 
-
+
 protected ForeignExceptionDispatcher
 monitor 
 
-
+
 protected org.apache.hadoop.fs.Path
 rootDir 
 
+
+protected 
org.apache.hadoop.fs.FileSystem
+rootFs 
+
 
 protected 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription
 snapshot 
@@ -221,6 +221,10 @@ implements protected org.apache.hadoop.fs.Path
 workingDir 
 
+
+protected 
org.apache.hadoop.fs.FileSystem
+workingDirFs 
+
 
 
 
@@ -270,10 +274,11 @@ implements 
 void
-completeSnapshot(org.apache.hadoop.fs.Path snapshotDir,
+completeSnapshot(org.apache.hadoop.fs.Path snapshotDir,
 org.apache.hadoop.fs.Path workingDir,
-org.apache.hadoop.fs.FileSystem fs)
-Reset the manager to allow another snapshot to proceed
+org.apache.hadoop.fs.FileSystem fs,
+org.apache.hadoop.fs.FileSystem workingDirFs)
+Reset the manager to allow another snapshot to 
proceed.
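
The extra FileSystem parameter on completeSnapshot exists because the working directory and the completed snapshot directory may now sit on different filesystems, where a plain rename cannot move the data. A rough sketch of that completion step under stated assumptions, not the exact HBase implementation:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

public final class CompleteSnapshotSketch {
  // Same-filesystem completion is a cheap rename; across filesystems the
  // working dir contents must be copied over to the destination instead.
  static boolean promote(Configuration conf, FileSystem workingDirFs, Path workingDir,
      FileSystem rootFs, Path snapshotDir) throws Exception {
    if (workingDirFs.getUri().equals(rootFs.getUri())) {
      return workingDirFs.rename(workingDir, snapshotDir);
    }
    return FileUtil.copy(workingDirFs, workingDir, rootFs, snapshotDir,
        true /* delete source */, conf);
  }
}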
 
 
 
@@ -385,7 +390,7 @@ implements 
 
 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
 
 
 
@@ -394,7 +399,7 @@ implements 
 
 finished
-private volatile boolean finished
+private volatile boolean finished
 
 
 
@@ -403,7 +408,7 @@ implements 
 
 master
-protected final MasterServices master
+protected final MasterServices master
 
 
 
@@ -412,7 +417,7 @@ implements 
 
 metricsSnapshot
-protected final MetricsSnapshot metricsSnapshot
+protected final MetricsSnapshot metricsSnapshot
 
 
 
@@ -421,7 +426,7 @@ implements 
 
 snapshot
-protected 
final org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription
 snapshot
+protected 
final org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription
 snapshot
 
 
 
@@ -430,16 +435,25 @@ implements 
 
 conf
-protected final org.apache.hadoop.conf.Configuration conf
+protected final org.apache.hadoop.conf.Configuration conf
 
 
-
+
 
 
 
 
-fs
-protected final org.apache.hadoop.fs.FileSystem fs
+rootFs
+protected final org.apache.hadoop.fs.FileSystem rootFs
+
+
+
+
+
+
+
+workingDirFs
+protected final org.apache.hadoop.fs.FileSystem workingDirFs
 
 
 
@@ -448,7 +462,7 @@ implements 
 
 rootDir
-protected final org.apache.hadoop.fs.Path rootDir
+protected final org.apache.hadoop.fs.Path rootDir
 
 
 
@@ -457,7 +471,7 @@ implements 
 
 snapshotDir
-private final org.apache.hadoop.fs.Path snapshotDir
+private final org.apache.hadoop.fs.Path snapshotDir
 
 
 
@@ -466,7 +480,7 @@ implements 
 
 workingDir
-protected final org.apache.hadoop.fs.Path workingDir
+protected final org.apache.hadoop.fs.Path workingDir
 
 
 
@@ -475,7 +489,7 @@ implements 
 
 verifier
-private final MasterSnapshotVerifier verifier
+private final MasterSnapshotVerifier verifier
 
 
 
@@ -484,7 +498,7 @@ implements 
 
 monitor
-protected final ForeignExceptionDispatcher monitor
+protected final ForeignExceptionDispatcher monitor
 
 
 
@@ -493,7 +507,7 @@ implements 
 
 tableLock
-protected final LockManager.MasterLock tableLock
+protected final LockManager.MasterLock tableLock
 
 
 
@@ -502,7 +516,7 @@ implements 
 
 status
-protected final MonitoredTask status
+protected final MonitoredTask status
 
 
 
@@ -511,7 +525,7 @@ implements 
 
 snapshotTable
-protected final TableName snapshotTable
+protected final TableName snapshotTable
 
 
 
@@ -520,7 +534,7 @@ implements 
 
 snapshotManifest
-protected final SnapshotManifest snapshotManifest
+protected final SnapshotManifest snapshotManifest
 
 
 
@@ -529,7 +543,7 @@ implements 
 
 snapshotManager
-protected final SnapshotManager 
snapshotManager
+protected final SnapshotManager 
snapshotManager
 
 
 
@@ -538,7 +552,7 @@ implements 
 
 htd
-protected TableDescriptor htd
+protected TableDescriptor htd
 
 
 
@@ -555,13 +569,19 @@ implements 
 
 TakeSnapshotHandler
-public TakeSnapshotHandler(org.apache.hadoop.hbase.shaded.protobuf

[46/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index a34a96a..5bc4312 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -281,10 +281,10 @@
  Warnings
  Errors
 
-3735
+3739
 0
 0
-15232
+15225
 
 Files
 
@@ -5207,7 +5207,7 @@
 org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
 0
 0
-2
+1
 
 org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java
 0
@@ -7927,7 +7927,7 @@
 org/apache/hadoop/hbase/rest/TestTableScan.java
 0
 0
-18
+12
 
 org/apache/hadoop/hbase/rest/VersionResource.java
 0
@@ -9806,19 +9806,19 @@
 caseIndent: "2"
 basicOffset: "2"
 lineWrappingIndentation: "2"
-4835
+4829
  Error
 
 javadoc
 http://checkstyle.sourceforge.net/config_javadoc.html#JavadocTagContinuationIndentation";>JavadocTagContinuationIndentation
 
 offset: "2"
-733
+734
  Error
 
 
 http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription";>NonEmptyAtclauseDescription
-3540
+3539
  Error
 
 misc
@@ -9836,7 +9836,7 @@
 
 max: "100"
 ignorePattern: "^package.*|^import.*|a 
href|href|http://|https://|ftp://|org.apache.thrift.|com.google.protobuf.|hbase.protobuf.generated"
-1487
+1486
  Error
 
 
@@ -19148,7 +19148,7 @@
 
  Error
 javadoc
-NonEmptyAtclauseDescription
+JavadocTagContinuationIndentation
 Javadoc comment at column 0 has parse error. Details: no viable 
alternative at input '   *' while parsing JAVADOC_TAG
 117
 
@@ -68407,32 +68407,26 @@
 Line
 
  Error
-sizes
-LineLength
-Line is longer than 100 characters (found 102).
-92
-
- Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-196
+197
 
 org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java
 
-
+
 Severity
 Category
 Rule
 Message
 Line
-
+
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 101).
 133
-
+
  Error
 blocks
 NeedBraces
@@ -68441,13 +68435,13 @@
 
 org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
 
-
+
 Severity
 Category
 Rule
 Message
 Line
-
+
  Error
 imports
 ImportOrder
@@ -68456,190 +68450,190 @@
 
 org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
 
-
+
 Severity
 Category
 Rule
 Message
 Line
-
+
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
 218
-
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 289
-
+
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 110).
 293
-
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 331
-
+
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 102).
 406
-
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-549
-
+553
+
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-638
-
+642
+
  Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.
-641
-
- Error
-javadoc
-NonEmptyAtclauseDescription
-At-clause should have a non-empty description.
-691
+645
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-760
+695
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-762
+764
 
  Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-813
+766
 
  Error
+javadoc
+NonEmptyAtclauseDescription
+At-clause should have a non-empty description.
+817
+
+ Error
 blocks
 LeftCurly
 '{' at column 7 should be on the previous line.
-971
-
+975
+
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-999
-
+1003
+
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1054
-
+1058
+
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1058
+1062
 
 org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
 
-
+
 Severity
 Category
 Rule
 Message
 Line
-
+
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 103).
-64
-
+68
+
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-191
-
+205
+
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-275
+304
 
 org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
 
-
+
 Severity
 Category
 Rule
 Message
 Line
-
+
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 101).
 93
-
+
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 103).
 126
-
+
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 104).
 138
-
+
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 104).
-163
-
+164
+
  Error
 sizes
 LineLength
 Line is longer than 100 characters (found 104).
-189
-
+191
+
  Error
 indentation
 Indentation
 'meth

[42/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
index 22fcd51..231636d 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":9,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":9,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":9,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":9,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -569,172 +569,176 @@ extends getSyncMetrics() 
 
 
+(package private) 
org.apache.hadoop.fs.Path
+getWalArchiveDir() 
+
+
 org.apache.hadoop.fs.Path
 getWALDir() 
 
-
+
 private ProcedureWALFile
 initOldLog(org.apache.hadoop.fs.FileStatus logFile,
   org.apache.hadoop.fs.Path walArchiveDir)
 Loads given log file and its tracker.
 
 
-
+
 private long
 initOldLogs(org.apache.hadoop.fs.FileStatus[] logFiles) 
 
-
+
 private void
 initTrackerFromOldLogs()
 If last log's tracker is not null, use it as storeTracker.
 
 
-
+
 void
 insert(Procedure[] procs)
 Serialize a set of new procedures.
 
 
-
+
 void
 insert(Procedure proc,
   Procedure[] subprocs)
 When a procedure is submitted to the executor insert(proc, 
null) will be called.
 
 
-
+
 private boolean
 isSyncAborted() 
 
-
+
 void
 load(ProcedureStore.ProcedureLoader loader)
 Load the Procedures in the store.
 
 
-
+
 static void
 main(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String[] args)
 Parses a directory of WALs building up ProcedureState.
 
 
-
+
 private void
 periodicRoll() 
 
-
+
 protected void
 periodicRollForTesting() 
 
-
+
 private long
 pushData(WALProcedureStore.PushType type,
 ByteSlot slot,
 long procId,
 long[] subProcIds) 
 
-
+
 void
 recoverLease()
 Acquire the lease for the procedure store.
 
 
-
+
 private void
 releaseSlot(ByteSlot slot) 
 
-
+
 private void
 removeAllLogs(long lastLogId)
 Remove all logs with logId <= lastLogId.
 
 
-
+
 private void
 removeInactiveLogs() 
 
-
+
 protected void
 removeInactiveLogsForTesting() 
 
-
+
 private boolean
 removeLogFile(ProcedureWALFile log,
  org.apache.hadoop.fs.Path walArchiveDir) 
 
-
+
 private boolean
 rollWriter() 
 
-
+
 (package private) boolean
 rollWriter(long logId) 
 
-
+
 protected boolean
 rollWriterForTesting() 
 
-
+
 private boolean
 rollWriterWithRetries() 
 
-
+
 private void
 sendStopSignal() 
 
-
+
 int
 setRunningProcedureCount(int count)
 Set the number of procedure running.
 
 
-
+
 void
 start(int numSlots)
 Start/Open the procedure store
 
 
-
+
 void
 stop(boolean abort)
 Stop/Close the procedure store
 
 
-
+
 private void
 syncLoop() 
 
-
+
 private long
 syncSlots() 
 
-
+
 protected long
 syncSlots(org.apache.hadoop.fs.FSDataOutputStream stream,
  ByteSlot[] slots,
  int offset,
  int count) 
 
-
+
 protected void
 syncStream(org.apache.hadoop.fs.FSDataOutputStream stream) 
 
-
+
 private void
 tryCleanupLogsOnLoad() 
 
-
+
 private boolean
 tryRollWriter() 
 
-
+
 void
 update(Procedure proc)
 The specified procedure was executed,
  and the new state should be written to the store.
 
 
-
+
 private void
 updateStoreTracker(WALProcedureStore.PushType type,
   long procId,
@@ -1439,7 +1443,7 @@ extends 
 
 WALS_PATH_FILTER
-private static final org.apache.hadoop.fs.PathFilter WALS_PATH_FILTER
+private static final org.apache.hadoop.fs.PathFilter WALS_PATH_FILTER
 
 
 
@@ -1448,7 +1452,7 @@ extends 
 
 FILE_STATUS_ID_COMPARATOR
-private static final https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is

[48/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index e746450..b4ceedc 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,16 +5,16 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20180911143207+00'00')
-/CreationDate (D:20180911145024+00'00')
+/ModDate (D:20180912143308+00'00')
+/CreationDate (D:20180912145001+00'00')
 >>
 endobj
 2 0 obj
 << /Type /Catalog
 /Pages 3 0 R
 /Names 28 0 R
-/Outlines 5008 0 R
-/PageLabels 5259 0 R
+/Outlines 5009 0 R
+/PageLabels 5260 0 R
 /PageMode /UseOutlines
 /OpenAction [7 0 R /FitH 842.89]
 /ViewerPreferences << /DisplayDocTitle true
@@ -24,7 +24,7 @@ endobj
 3 0 obj
 << /Type /Pages
 /Count 787
-/Kids [ ...787 page object references; raw PDF page-tree array (renumbered by this publish, truncated in the archive) elided... ]

[32/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
index bbf8130..566f410 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
@@ -4276,7 +4276,7 @@
 4268  public void 
addRegionToSnapshot(SnapshotDescription desc,
 4269  ForeignExceptionSnare exnSnare) 
throws IOException {
 4270Path rootDir = 
FSUtils.getRootDir(conf);
-4271Path snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir);
+4271Path snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
 4272
 4273SnapshotManifest manifest = 
SnapshotManifest.create(conf, getFilesystem(),
 4274snapshotDir, desc, 
exnSnare);
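
The Configuration argument added to getWorkingSnapshotDir above lets the working directory resolve somewhere other than under the root dir, so callers can no longer assume both paths share a filesystem. A small self-contained sketch of resolving each side (both paths are placeholders):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;

public final class SnapshotFsSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    Path rootDir = new Path("hdfs://namenode/hbase");                // illustrative
    Path workingDir = new Path("file:///tmp/.hbase-snapshot-work");  // illustrative
    FileSystem rootFs = rootDir.getFileSystem(conf);
    FileSystem workingDirFs = workingDir.getFileSystem(conf);
    System.out.println(rootFs.getUri() + " vs " + workingDirFs.getUri());
  }
}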

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
index bbf8130..566f410 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
@@ -4276,7 +4276,7 @@
 4268  public void 
addRegionToSnapshot(SnapshotDescription desc,
 4269  ForeignExceptionSnare exnSnare) 
throws IOException {
 4270Path rootDir = 
FSUtils.getRootDir(conf);
-4271Path snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir);
+4271Path snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
 4272
 4273SnapshotManifest manifest = 
SnapshotManifest.create(conf, getFilesystem(),
 4274snapshotDir, desc, 
exnSnare);

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
index bbf8130..566f410 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BulkLoadListener.html
@@ -4276,7 +4276,7 @@
 4268  public void 
addRegionToSnapshot(SnapshotDescription desc,
 4269  ForeignExceptionSnare exnSnare) 
throws IOException {
 4270Path rootDir = 
FSUtils.getRootDir(conf);
-4271Path snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir);
+4271Path snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
 4272
 4273SnapshotManifest manifest = 
SnapshotManifest.create(conf, getFilesystem(),
 4274snapshotDir, desc, 
exnSnare);

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
index bbf8130..566f410 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
@@ -4276,7 +4276,7 @@
 4268  public void 
addRegionToSnapshot(SnapshotDescription desc,
 4269  ForeignExceptionSnare exnSnare) 
throws IOException {
 4270Path rootDir = 
FSUtils.getRootDir(conf);
-4271Path snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir);
+4271Path snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
 4272
 4273SnapshotManifest manifest = 
SnapshotManifest.create(conf, getFilesystem(),
 4274snapshotDir, desc, 
exnSnare);

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResu

[33/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
index df746ae..d27bef3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
@@ -208,1157 +208,1163 @@
 200  throws IOException {
 201this(conf,
 202new 
Path(CommonFSUtils.getWALRootDir(conf), MASTER_PROCEDURE_LOGDIR),
-203new 
Path(CommonFSUtils.getRootDir(conf), HConstants.HREGION_OLDLOGDIR_NAME), 
leaseRecovery);
-204  }
-205
-206  @VisibleForTesting
-207  public WALProcedureStore(final 
Configuration conf, final Path walDir, final Path walArchiveDir,
-208  final LeaseRecovery leaseRecovery) 
throws IOException {
-209this.conf = conf;
-210this.leaseRecovery = leaseRecovery;
-211this.walDir = walDir;
-212this.walArchiveDir = walArchiveDir;
-213this.fs = 
walDir.getFileSystem(conf);
-214this.enforceStreamCapability = 
conf.getBoolean(CommonFSUtils.UNSAFE_STREAM_CAPABILITY_ENFORCE, true);
-215
-216// Create the log directory for the 
procedure store
-217if (!fs.exists(walDir)) {
-218  if (!fs.mkdirs(walDir)) {
-219throw new IOException("Unable to 
mkdir " + walDir);
-220  }
-221}
-222// Now that it exists, set the log 
policy
-223String storagePolicy =
-224
conf.get(HConstants.WAL_STORAGE_POLICY, 
HConstants.DEFAULT_WAL_STORAGE_POLICY);
-225CommonFSUtils.setStoragePolicy(fs, 
walDir, storagePolicy);
-226
-227// Create archive dir up front. 
Rename won't work w/o it up on HDFS.
-228if (this.walArchiveDir != null 
&& !this.fs.exists(this.walArchiveDir)) {
-229  if 
(this.fs.mkdirs(this.walArchiveDir)) {
-230if (LOG.isDebugEnabled()) {
-231  LOG.debug("Created Procedure 
Store WAL archive dir " + this.walArchiveDir);
-232}
-233  } else {
-234LOG.warn("Failed create of " + 
this.walArchiveDir);
-235  }
-236}
-237  }
-238
-239  @Override
-240  public void start(int numSlots) throws 
IOException {
-241if (!setRunning(true)) {
-242  return;
-243}
-244
-245// Init buffer slots
-246loading.set(true);
-247runningProcCount = numSlots;
-248syncMaxSlot = numSlots;
-249slots = new ByteSlot[numSlots];
-250slotsCache = new 
LinkedTransferQueue();
-251while (slotsCache.size() < 
numSlots) {
-252  slotsCache.offer(new ByteSlot());
-253}
-254
-255// Tunings
-256walCountWarnThreshold =
-257  
conf.getInt(WAL_COUNT_WARN_THRESHOLD_CONF_KEY, 
DEFAULT_WAL_COUNT_WARN_THRESHOLD);
-258maxRetriesBeforeRoll =
-259  
conf.getInt(MAX_RETRIES_BEFORE_ROLL_CONF_KEY, 
DEFAULT_MAX_RETRIES_BEFORE_ROLL);
-260maxSyncFailureRoll = 
conf.getInt(MAX_SYNC_FAILURE_ROLL_CONF_KEY, DEFAULT_MAX_SYNC_FAILURE_ROLL);
-261waitBeforeRoll = 
conf.getInt(WAIT_BEFORE_ROLL_CONF_KEY, DEFAULT_WAIT_BEFORE_ROLL);
-262rollRetries = 
conf.getInt(ROLL_RETRIES_CONF_KEY, DEFAULT_ROLL_RETRIES);
-263rollThreshold = 
conf.getLong(ROLL_THRESHOLD_CONF_KEY, DEFAULT_ROLL_THRESHOLD);
-264periodicRollMsec = 
conf.getInt(PERIODIC_ROLL_CONF_KEY, DEFAULT_PERIODIC_ROLL);
-265syncWaitMsec = 
conf.getInt(SYNC_WAIT_MSEC_CONF_KEY, DEFAULT_SYNC_WAIT_MSEC);
-266useHsync = 
conf.getBoolean(USE_HSYNC_CONF_KEY, DEFAULT_USE_HSYNC);
-267
-268// WebUI
-269syncMetricsQueue = new 
CircularFifoQueue(
-270  
conf.getInt(STORE_WAL_SYNC_STATS_COUNT, DEFAULT_SYNC_STATS_COUNT));
-271
-272// Init sync thread
-273syncThread = new 
Thread("WALProcedureStoreSyncThread") {
-274  @Override
-275  public void run() {
-276try {
-277  syncLoop();
-278} catch (Throwable e) {
-279  LOG.error("Got an exception 
from the sync-loop", e);
-280  if (!isSyncAborted()) {
-281sendAbortProcessSignal();
-282  }
-283}
-284  }
-285};
-286syncThread.start();
-287  }
-288
-289  @Override
-290  public void stop(final boolean abort) 
{
-291if (!setRunning(false)) {
-292  return;
-293}
-294
-295LOG.info("Stopping the WAL Procedure 
Store, isAbort=" + abort +
-296  (isSyncAborted() ? " (self 
aborting)" : ""));
-297sendStopSignal();
-298if (!isSyncAborted()) {
-299  try {
-300while (syncThread.isAlive()) {
-301  sendStopSignal();
-302  syncThread.join(250);
-303}
-304  } catch (InterruptedException e) 
{
-305LOG.warn("join interrupted", 
e);
-306
Thread.currentThread().interrupt();
-307  }
-308}
-309
-310// Close the writer
-311clo
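
The constructor above creates the archive directory eagerly because, as its comment notes, a rename into a missing directory fails on HDFS rather than creating it. The same guard restated as a standalone sketch:

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public final class ArchiveDirSketch {
  // Make the WAL archive dir up front so later renames into it succeed.
  static void ensureArchiveDir(FileSystem fs, Path walArchiveDir) throws IOException {
    if (walArchiveDir != null && !fs.exists(walArchiveDir)) {
      if (!fs.mkdirs(walArchiveDir)) {
        // Mirrors the store's behaviour: a failed mkdir is logged, not fatal.
        System.err.println("Failed create of " + walArchiveDir);
      }
    }
  }
}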

[14/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.TestSequentialProcedure.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.TestSequentialProcedure.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.TestSequentialProcedure.html
index a18e598..44f63ea 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.TestSequentialProcedure.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.TestSequentialProcedure.html
@@ -906,82 +906,98 @@
 898assertEquals("WALs=" + 
procStore.getActiveLogs(), 1, procStore.getActiveLogs().size());
 899  }
 900
-901  private LoadCounter 
restartAndAssert(long maxProcId, long runnableCount,
-902  int completedCount, int 
corruptedCount) throws Exception {
-903return 
ProcedureTestingUtility.storeRestartAndAssert(procStore, maxProcId,
-904  runnableCount, completedCount, 
corruptedCount);
-905  }
-906
-907  private void corruptLog(final 
FileStatus logFile, final long dropBytes)
-908  throws IOException {
-909assertTrue(logFile.getLen() > 
dropBytes);
-910LOG.debug("corrupt log " + 
logFile.getPath() +
-911  " size=" + logFile.getLen() 
+ " drop=" + dropBytes);
-912Path tmpPath = new Path(testDir, 
"corrupted.log");
-913InputStream in = 
fs.open(logFile.getPath());
-914OutputStream out =  
fs.create(tmpPath);
-915IOUtils.copyBytes(in, out, 
logFile.getLen() - dropBytes, true);
-916if (!fs.rename(tmpPath, 
logFile.getPath())) {
-917  throw new IOException("Unable to 
rename");
-918}
-919  }
-920
-921  private void 
verifyProcIdsOnRestart(final Set procIds) throws Exception {
-922LOG.debug("expected: " + procIds);
-923LoadCounter loader = new 
LoadCounter();
-924storeRestart(loader);
-925assertEquals(procIds.size(), 
loader.getLoadedCount());
-926assertEquals(0, 
loader.getCorruptedCount());
-927  }
-928
-929  private void assertEmptyLogDir() {
-930try {
-931  FileStatus[] status = 
fs.listStatus(logDir);
-932  assertTrue("expected empty 
state-log dir", status == null || status.length == 0);
-933} catch (FileNotFoundException e) {
-934  fail("expected the state-log dir to 
be present: " + logDir);
-935} catch (IOException e) {
-936  fail("got en exception on state-log 
dir list: " + e.getMessage());
-937}
-938  }
-939
-940  public static class 
TestSequentialProcedure extends SequentialProcedure {
-941private static long seqid = 0;
-942
-943public TestSequentialProcedure() {
-944  setProcId(++seqid);
-945}
-946
-947@Override
-948protected Procedure[] execute(Void 
env) { return null; }
-949
-950@Override
-951protected void rollback(Void env) { 
}
-952
-953@Override
-954protected boolean abort(Void env) { 
return false; }
+901  @Test
+902  public void 
testWALDirAndWALArchiveDir() throws IOException {
+903Configuration conf = 
htu.getConfiguration();
+904procStore = 
createWALProcedureStore(conf);
+905
assertEquals(procStore.getFileSystem(), 
procStore.getWalArchiveDir().getFileSystem(conf));
+906  }
+907
+908  private WALProcedureStore 
createWALProcedureStore(Configuration conf) throws IOException {
+909return new WALProcedureStore(conf, 
new WALProcedureStore.LeaseRecovery() {
+910  @Override
+911  public void 
recoverFileLease(FileSystem fs, Path path) throws IOException {
+912// no-op
+913  }
+914});
+915  }
+916
+917  private LoadCounter 
restartAndAssert(long maxProcId, long runnableCount,
+918  int completedCount, int 
corruptedCount) throws Exception {
+919return 
ProcedureTestingUtility.storeRestartAndAssert(procStore, maxProcId,
+920  runnableCount, completedCount, 
corruptedCount);
+921  }
+922
+923  private void corruptLog(final 
FileStatus logFile, final long dropBytes)
+924  throws IOException {
+925assertTrue(logFile.getLen() > 
dropBytes);
+926LOG.debug("corrupt log " + 
logFile.getPath() +
+927  " size=" + logFile.getLen() 
+ " drop=" + dropBytes);
+928Path tmpPath = new Path(testDir, 
"corrupted.log");
+929InputStream in = 
fs.open(logFile.getPath());
+930OutputStream out =  
fs.create(tmpPath);
+931IOUtils.copyBytes(in, out, 
logFile.getLen() - dropBytes, true);
+932if (!fs.rename(tmpPath, 
logFile.getPath())) {
+933  throw new IOException("Unable to 
rename");
+934}
+935  }
+936
+937  private void 
verifyProcIdsOnRestart(final Set procIds) throws Exception {
+938LOG.debug("expected: " + procIds);
+939LoadCounter loader = new 
LoadCounter();
+940storeRestart(loader);
+941assertEquals(procIds.size()

[22/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.html
index 81c850b..1135952 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.html
@@ -77,131 +77,134 @@
 069
SnapshotRegionManifest.Builder, SnapshotRegionManifest.FamilyFiles.Builder> 
{
 070private final Configuration conf;
 071private final Path snapshotDir;
-072private final FileSystem fs;
+072private final FileSystem rootFs;
 073
-074public ManifestBuilder(final 
Configuration conf, final FileSystem fs, final Path snapshotDir) {
-075  this.snapshotDir = snapshotDir;
-076  this.conf = conf;
-077  this.fs = fs;
-078}
-079
-080@Override
-081public SnapshotRegionManifest.Builder 
regionOpen(final RegionInfo regionInfo) {
-082  SnapshotRegionManifest.Builder 
manifest = SnapshotRegionManifest.newBuilder();
-083  
manifest.setRegionInfo(ProtobufUtil.toRegionInfo(regionInfo));
-084  return manifest;
-085}
-086
-087@Override
-088public void regionClose(final 
SnapshotRegionManifest.Builder region) throws IOException {
-089  // we should ensure the snapshot 
dir exist, maybe it has been deleted by master
-090  // see HBASE-16464
-091  if (fs.exists(snapshotDir)) {
-092SnapshotRegionManifest manifest = 
region.build();
-093FSDataOutputStream stream = 
fs.create(getRegionManifestPath(snapshotDir, manifest));
-094try {
-095  manifest.writeTo(stream);
-096} finally {
-097  stream.close();
-098}
-099  } else {
-100LOG.warn("can't write manifest 
without parent dir, maybe it has been deleted by master?");
-101  }
-102}
-103
-104@Override
-105public 
SnapshotRegionManifest.FamilyFiles.Builder familyOpen(
-106final 
SnapshotRegionManifest.Builder region, final byte[] familyName) {
-107  
SnapshotRegionManifest.FamilyFiles.Builder family =
-108  
SnapshotRegionManifest.FamilyFiles.newBuilder();
-109  
family.setFamilyName(UnsafeByteOperations.unsafeWrap(familyName));
-110  return family;
-111}
-112
-113@Override
-114public void familyClose(final 
SnapshotRegionManifest.Builder region,
-115final 
SnapshotRegionManifest.FamilyFiles.Builder family) {
-116  
region.addFamilyFiles(family.build());
-117}
-118
-119@Override
-120public void storeFile(final 
SnapshotRegionManifest.Builder region,
-121final 
SnapshotRegionManifest.FamilyFiles.Builder family, final StoreFileInfo 
storeFile)
-122throws IOException {
-123  
SnapshotRegionManifest.StoreFile.Builder sfManifest =
-124
SnapshotRegionManifest.StoreFile.newBuilder();
-125  
sfManifest.setName(storeFile.getPath().getName());
-126  if (storeFile.isReference()) {
-127
sfManifest.setReference(storeFile.getReference().convert());
-128  }
-129  
sfManifest.setFileSize(storeFile.getReferencedFileStatus(fs).getLen());
-130  
family.addStoreFiles(sfManifest.build());
-131}
-132  }
-133
-134  static 
List loadRegionManifests(final Configuration 
conf,
-135  final Executor executor, final 
FileSystem fs, final Path snapshotDir,
-136  final SnapshotDescription desc, 
final int manifestSizeLimit) throws IOException {
-137FileStatus[] manifestFiles = 
FSUtils.listStatus(fs, snapshotDir, new PathFilter() {
-138  @Override
-139  public boolean accept(Path path) 
{
-140return 
path.getName().startsWith(SNAPSHOT_MANIFEST_PREFIX);
-141  }
-142});
-143
-144if (manifestFiles == null || 
manifestFiles.length == 0) return null;
-145
-146final 
ExecutorCompletionService completionService =
-147  new 
ExecutorCompletionService<>(executor);
-148for (final FileStatus st: 
manifestFiles) {
-149  completionService.submit(new 
Callable() {
-150@Override
-151public SnapshotRegionManifest 
call() throws IOException {
-152  FSDataInputStream stream = 
fs.open(st.getPath());
-153  CodedInputStream cin = 
CodedInputStream.newInstance(stream);
-154  
cin.setSizeLimit(manifestSizeLimit);
-155
-156  try {
-157return 
SnapshotRegionManifest.parseFrom(cin);
-158  } finally {
-159stream.close();
-160  }
-161}
-162  });
-163}
-164
-165
ArrayList regionsManifest = new 
ArrayList<>(manifestFiles.length);
-166try {
-167  for (int i = 0; i < 
manifestFiles.l
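
loadRegionManifests() above parses one region manifest per file through an ExecutorCompletionService and drains the results afterwards. The pattern in isolation, as a generic sketch:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorCompletionService;

public final class CompletionServiceSketch {
  // Submit one task per input, then collect results in completion order;
  // a task failure surfaces as an ExecutionException from get().
  static <T> List<T> runAll(Executor executor, List<Callable<T>> tasks)
      throws InterruptedException, ExecutionException {
    ExecutorCompletionService<T> cs = new ExecutorCompletionService<>(executor);
    for (Callable<T> task : tasks) {
      cs.submit(task);
    }
    List<T> results = new ArrayList<>(tasks.size());
    for (int i = 0; i < tasks.size(); i++) {
      results.add(cs.take().get()); // blocks for the next finished task
    }
    return results;
  }
}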

[18/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/org/apache/hadoop/hbase/client/package-tree.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/package-tree.html 
b/testdevapidocs/org/apache/hadoop/hbase/client/package-tree.html
index c297d21..4e2d341 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -515,6 +515,12 @@
 
 
 org.apache.hadoop.hbase.client.TestSnapshotMetadata
+org.apache.hadoop.hbase.client.TestSnapshotTemporaryDirectory
+
+org.apache.hadoop.hbase.client.TestSnapshotDFSTemporaryDirectory
+org.apache.hadoop.hbase.client.TestSnapshotTemporaryDirectoryWithRegionReplicas
+
+
 org.apache.hadoop.hbase.client.TestSnapshotWithAcl.AccessReadAction 
(implements org.apache.hadoop.hbase.security.access.SecureTestUtil.AccessTestAction)
 org.apache.hadoop.hbase.client.TestSnapshotWithAcl.AccessWriteAction 
(implements org.apache.hadoop.hbase.security.access.SecureTestUtil.AccessTestAction)
 org.apache.hadoop.hbase.client.TestSplitOrMergeStatus

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html 
b/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html
index e89ffa5..683d0ad 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html
@@ -227,6 +227,12 @@
 Test create/using/deleting snapshots from the client
 
 
+
+TestSnapshotTemporaryDirectory
+This class tests that the use of a temporary snapshot 
directory supports snapshot functionality
+ while the temporary directory is on a different file system than the root 
directory
+
+
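
A sketch of how such a split could be configured, assuming the feature is driven by a property named "hbase.snapshot.working.dir" (the key itself does not appear in these diffs, so treat the name as an assumption):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;

public final class TempSnapshotDirConfigSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Hypothetical property: point the snapshot working dir at a different
    // filesystem (here local disk) than the HBase root directory.
    conf.set("hbase.snapshot.working.dir", "file:///tmp/hbase-snapshot-working");
    Path workingDir = new Path(conf.get("hbase.snapshot.working.dir"));
    FileSystem workingDirFs = workingDir.getFileSystem(conf);
    System.out.println("snapshot working dir on: " + workingDirFs.getUri());
  }
}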
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.html
 
b/testdevapidocs/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.html
index 3193c7d..3930687 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.html
@@ -439,7 +439,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 testCorruptedDataManifest
-public void testCorruptedDataManifest()
+public void testCorruptedDataManifest()
throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
If there is a corrupted data manifest, it should throw a CorruptedSnapshotException instead of an IOException
@@ -455,7 +455,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 testMissedTmpSnapshot
-public void testMissedTmpSnapshot()
+public void testMissedTmpSnapshot()
throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 HBASE-16464
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/package-tree.html 
b/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
index 8b2cdec..d9e587f 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -578,15 +578,15 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.PerformanceEvaluation.Counter
-org.apache.hadoop.hbase.IntegrationTestDDLMasterFailover.ACTION
-org.apache.hadoop.hbase.RESTApiClusterManager.RoleCommand
 org.apache.hadoop.hbase.ClusterManager.ServiceType
-org.apache.hadoop.hbase.RESTApiClusterManager.Service
-org.apache.hadoop.hbase.ResourceChecker.Phase
-org.apache.hadoop.hbase.IntegrationTestRegionReplicaPerf.Stat
 org.apache.hadoop.hbase.ScanPerformanceEvaluation.ScanCounter
+org.apache.hadoop.hbase.RESTApiClusterManager.Service
 org.apache.hadoop.hbase.HBaseClust

[34/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.SyncMetrics.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.SyncMetrics.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.SyncMetrics.html
index df746ae..d27bef3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.SyncMetrics.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.SyncMetrics.html
@@ -208,1157 +208,1163 @@
 [This hunk repeats, verbatim, the WALProcedureStore source diff shown in the preceding WALProcedureStore.html message; javadoc embeds the same source listing on each inner-class page. Duplicate body omitted.]

[35/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.PushType.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.PushType.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.PushType.html
index df746ae..d27bef3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.PushType.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.PushType.html
@@ -208,1157 +208,1163 @@
 [Duplicate of the same WALProcedureStore source hunk again, this time for the PushType inner-class page. Body omitted.]

[24/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifest.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifest.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifest.html
index 025b084..b9f473a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifest.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifest.html
@@ -89,504 +89,517 @@
   private final ForeignExceptionSnare monitor;
   private final Configuration conf;
   private final Path workingDir;
-  private final FileSystem fs;
-  private int manifestSizeLimit;
-
-  private SnapshotManifest(final Configuration conf, final FileSystem fs,
-      final Path workingDir, final SnapshotDescription desc,
-      final ForeignExceptionSnare monitor) {
-    this.monitor = monitor;
-    this.desc = desc;
-    this.workingDir = workingDir;
-    this.conf = conf;
-    this.fs = fs;
-
-    this.manifestSizeLimit = conf.getInt(SNAPSHOT_MANIFEST_SIZE_LIMIT_CONF_KEY, 64 * 1024 * 1024);
-  }
-
-  /**
-   * Return a SnapshotManifest instance, used for writing a snapshot.
-   *
-   * There are two usage pattern:
-   *  - The Master will create a manifest, add the descriptor, offline regions
-   *    and consolidate the snapshot by writing all the pending stuff on-disk.
-   *      manifest = SnapshotManifest.create(...)
-   *      manifest.addRegion(tableDir, hri)
-   *      manifest.consolidate()
-   *  - The RegionServer will create a single region manifest
-   *      manifest = SnapshotManifest.create(...)
-   *      manifest.addRegion(region)
-   */
-  public static SnapshotManifest create(final Configuration conf, final FileSystem fs,
-      final Path workingDir, final SnapshotDescription desc,
-      final ForeignExceptionSnare monitor) {
-    return new SnapshotManifest(conf, fs, workingDir, desc, monitor);
-  }
-
-  /**
-   * Return a SnapshotManifest instance with the information already loaded in-memory.
-   *    SnapshotManifest manifest = SnapshotManifest.open(...)
-   *    TableDescriptor htd = manifest.getTableDescriptor()
-   *    for (SnapshotRegionManifest regionManifest: manifest.getRegionManifests())
-   *      hri = regionManifest.getRegionInfo()
-   *      for (regionManifest.getFamilyFiles())
-   *        ...
-   */
-  public static SnapshotManifest open(final Configuration conf, final FileSystem fs,
-      final Path workingDir, final SnapshotDescription desc) throws IOException {
-    SnapshotManifest manifest = new SnapshotManifest(conf, fs, workingDir, desc, null);
-    manifest.load();
-    return manifest;
-  }
-
-  /**
-   * Add the table descriptor to the snapshot manifest
+  private final FileSystem rootFs;
+  private final FileSystem workingDirFs;
+  private int manifestSizeLimit;
+
+  /**
+   * @param conf configuration file for HBase setup
+   * @param rootFs root filesystem containing HFiles
+   * @param workingDir file path of where the manifest should be located
+   * @param desc description of snapshot being taken
+   * @param monitor monitor of foreign exceptions
+   * @throws IOException if the working directory file system cannot be
+   *                     determined from the config file
+   */
+  private SnapshotManifest(final Configuration conf, final FileSystem rootFs,
+      final Path workingDir, final SnapshotDescription desc,
+      final ForeignExceptionSnare monitor) throws IOException {
+    this.monitor = monitor;
+    this.desc = desc;
+    this.workingDir = workingDir;
+    this.conf = conf;
+    this.rootFs = rootFs;
+    this.workingDirFs = this.workingDir.getFileSystem(this.conf);
+    this.manifestSizeLimit = conf.getInt(SNAPSHOT_MANIFEST_SIZE_LIMIT_CONF_KEY, 64 * 1024 * 1024);
+  }
+
+  /**
+   * Return a SnapshotManifest instance, used for writing a snapshot.
+   * (usage-pattern javadoc unchanged from the removed version above)
+   */
+  public static SnapshotManifest create(final Configuration conf, final FileSystem fs,
+      final Path workingDir, final SnapshotDescription desc,
+      final ForeignExcept
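The substantive change in this hunk is that the manifest no longer assumes its working directory lives on the root filesystem: a second FileSystem handle is derived from the working-directory path itself. A minimal sketch of that derivation, using the standard Hadoop API (the paths here are illustrative, not from the patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class WorkingDirFsDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Illustrative: root data on HDFS, snapshot working dir on the local FS.
        Path rootDir = new Path("hdfs://namenode:8020/hbase");
        Path workingDir = new Path("file:///tmp/hbase-snapshot/.tmp");

        // Each Path resolves to its own FileSystem; the two need not match,
        // which is exactly why SnapshotManifest now keeps both handles.
        FileSystem rootFs = rootDir.getFileSystem(conf);
        FileSystem workingDirFs = workingDir.getFileSystem(conf);

        System.out.println("rootFs       = " + rootFs.getUri());
        System.out.println("workingDirFs = " + workingDirFs.getUri());
      }
    }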

[20/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
index 7986a3a..90e736f 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
@@ -1299,120 +1299,128 @@ and @@ -1519,126 +1527,130 @@
(Generated class-use table for HBaseClassTestRule. The only substantive change is the
insertion of new rows for TestSnapshotTemporaryDirectory.CLASS_RULE,
TestSnapshotTemporaryDirectoryWithRegionReplicas.CLASS_RULE and
TestSnapshotDFSTemporaryDirectory.CLASS_RULE. The existing CLASS_RULE rows simply shift
by one position each: TestAsyncTableAdminApi, TestBlockEvictionFromClient,
TestCIBadHostname, TestAsyncTableAdminApi2, TestAsyncQuotaAdminApi,
TestAsyncMetaRegionLocator, TestAsyncTableNoncedRetry, TestConnectionUtils,
TestAsyncTableScanMetrics, TestFromClientSideScanExcpetionWithCoprocessor,
TestAsyncDecommissionAdminApi, TestMetaCache, TestClientScannerRPCTimeout,
TestAsyncReplicationAdminApiWithClusters, TestCIPutOperationTimeout,
TestAsyncClusterAdminApi, TestScannersFromClientSide, TestHTableMultiplexer,
TestMobSnapshotCloneIndependence, TestAsyncRegionLocatorTimeout,
TestUpdateConfiguration, TestAdmin1, TestCIIncrementRpcTimeout,
TestCloneSnapshotFromClientWithRegionReplicas, TestIllegalTableDescriptor,
TestGetScanPartialResult, TestShortCircuitConnection, TestResultFromCoprocessor,
TestCIDeleteOperationTimeout, TestAsyncTableGetMultiThreadedWithEagerCompaction,
TestMetaWithReplicas, TestClientOperationInterrupt, TestAsyncTableAdminApi3,
TestMultiParallel, TestSplitOrMergeStatus, TestAsyncNonMetaRegionLocatorConcurrenyLimit,
TestAsyncAdminBuilder, TestIntraRowPagination, TestHBaseAdminNoCluster. The second hunk
is truncated in the archive at "TestIntraRo".)

[10/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.WrappedRowLock.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.WrappedRowLock.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.WrappedRowLock.html
index 7ed37ca..907a45d 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.WrappedRowLock.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.WrappedRowLock.html
@@ -28,713 +28,752 @@
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MultithreadedTestUtil;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Testing of HRegion.incrementColumnValue, HRegion.increment,
 * and HRegion.append
 */
@Category({VerySlowRegionServerTests.class, MediumTests.class}) // Starts 100 threads
public class TestAtomicOperation {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestAtomicOperation.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestAtomicOperation.class);
  @Rule public TestName name = new TestName();

  HRegion region = null;
  private HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();

  // Test names
  static byte[] tableName;
  static final byte[] qual1 = Bytes.toBytes("qual1");
  static final byte[] qual2 = Bytes.toBytes("qual2");
  static final byte[] qual3 = Bytes.toBytes("qual3");
  static final byte[] value1 = Bytes.toBytes("value1");
  static final byte[] value2 = Bytes.toBytes("value2");
  static final byte [] row = Bytes.toBytes("rowA");
  static final byte [] row2 = Bytes.toBytes("rowB");

  @Before
  public void setup() {
    tableName = Bytes.toBytes(name.getMethodName());
  }

  @After
  public void teardown() throws IOException {
    if (region != null) {
      Bloc
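For context, the region-level increment and append operations this test exercises are exposed to clients through Table.increment and Table.append. A hedged sketch of the client view (table, family and qualifier names are illustrative; a running cluster with its hbase-site.xml on the classpath is assumed):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Append;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Increment;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AtomicOpsClientDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("demo"))) {
          // Atomically add 1 to a counter cell.
          Increment inc = new Increment(Bytes.toBytes("rowA"));
          inc.addColumn(Bytes.toBytes("fam1"), Bytes.toBytes("qual1"), 1L);
          table.increment(inc);

          // Atomically append bytes to an existing cell value.
          Append app = new Append(Bytes.toBytes("rowA"));
          app.addColumn(Bytes.toBytes("fam1"), Bytes.toBytes("qual2"), Bytes.toBytes("-suffix"));
          table.append(app);
        }
      }
    }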

[19/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/org/apache/hadoop/hbase/client/TestSnapshotTemporaryDirectory.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/TestSnapshotTemporaryDirectory.html b/testdevapidocs/org/apache/hadoop/hbase/client/TestSnapshotTemporaryDirectory.html
new file mode 100644
index 0000000..e57e988
--- /dev/null
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/TestSnapshotTemporaryDirectory.html
@@ -0,0 +1,679 @@
org.apache.hadoop.hbase.client
Class TestSnapshotTemporaryDirectory

java.lang.Object
  org.apache.hadoop.hbase.client.TestSnapshotTemporaryDirectory

Direct Known Subclasses:
  TestSnapshotDFSTemporaryDirectory, TestSnapshotTemporaryDirectoryWithRegionReplicas

public class TestSnapshotTemporaryDirectory extends Object

This class tests that the use of a temporary snapshot directory supports snapshot
functionality while the temporary directory is on a different file system than the
root directory.

This is an end-to-end test for the snapshot utility.

Field Summary
  protected static org.apache.hadoop.hbase.client.Admin admin
  static HBaseClassTestRule CLASS_RULE
  private static org.slf4j.Logger LOG
  int manifestVersion
  protected static int NUM_RS
  protected static String STRING_TABLE_NAME
  protected static org.apache.hadoop.hbase.TableName TABLE_NAME
  protected static String TEMP_DIR
  protected static byte[] TEST_FAM
  protected static HBaseTestingUtility UTIL

Constructor Summary
  TestSnapshotTemporaryDirectory()

Method Summary
  static void cleanupTest()
  static Iterable<Integer> data()
  protected int getNumReplicas()
  void setup()
  static void setupCluster()    (Setup the config for the cluster)
  private static void setupConf(org.apache.hadoop.conf.Configuration conf)
  private void takeSnapshot(org.apache.hadoop.hbase.TableName tableName, String snapshotName, boolean disabled)
  void tearDown()
  void testEnsureTemporaryDirectoryTransfer()
  void testOfflineTableSnapshot()    (Test snapshotting a table that is offline)
  void testOfflineTableSnapshotWithEmptyRegion()
  void testRestoreDisabledSnapshot()
  void testRestoreEnabledSnapshot()
  void testSnapshotCloneContents()    (Tests that snapshot has correct contents by taking snapshot, cloning it, the
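A rough sketch of the setup this test exercises: the snapshot staging area is pointed at a different filesystem than the HBase root purely through configuration. The property name hbase.snapshot.working.dir below is an assumption inferred from the temporary-snapshot-directory feature this test covers and should be verified against the HBase version in use; paths are illustrative.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class SnapshotTempDirConfigDemo {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Root data stays on HDFS ...
        conf.set("hbase.rootdir", "hdfs://namenode:8020/hbase");
        // ... while in-progress snapshots are staged on a different filesystem.
        // NOTE: property name assumed from the temporary-snapshot-directory feature.
        conf.set("hbase.snapshot.working.dir", "file:///tmp/hbase-snapshot-staging");

        System.out.println("staging dir = " + conf.get("hbase.snapshot.working.dir"));
      }
    }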

[47/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/apidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html b/apidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
index f1f36a4..fa81722 100644
--- a/apidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
+++ b/apidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
@@ -393,7 +393,7 @@ implements org.apache.hadoop.util.Tool
 
 
printUsage
-protected void printUsage()
+protected void printUsage()
Overrides:
  printUsage in class org.apache.hadoop.hbase.util.AbstractHBaseTool

@@ -406,7 +406,7 @@ implements org.apache.hadoop.util.Tool
addOptions
-protected void addOptions()
+protected void addOptions()
Description copied from class: org.apache.hadoop.hbase.util.AbstractHBaseTool
Override this to add command-line options using AbstractHBaseTool.addOptWithArg(java.lang.String, java.lang.String) and similar methods.

@@ -422,7 +422,7 @@ implements org.apache.hadoop.util.Tool
main
-public static void main(String[] args)
+public static void main(String[] args)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html b/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
index 7d49582..01861a7 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
@@ -980,175 +980,177 @@
     FileSystem outputFs = FileSystem.get(outputRoot.toUri(), destConf);
     LOG.debug("outputFs=" + outputFs.getUri().toString() + " outputRoot=" + outputRoot.toString());

-    boolean skipTmp = conf.getBoolean(CONF_SKIP_TMP, false);
-
-    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, inputRoot);
-    Path snapshotTmpDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(targetName, outputRoot);
-    Path outputSnapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(targetName, outputRoot);
-    Path initialOutputSnapshotDir = skipTmp ? outputSnapshotDir : snapshotTmpDir;
-
-    // Find the necessary directory which need to change owner and group
-    Path needSetOwnerDir = SnapshotDescriptionUtils.getSnapshotRootDir(outputRoot);
-    if (outputFs.exists(needSetOwnerDir)) {
-      if (skipTmp) {
-        needSetOwnerDir = outputSnapshotDir;
-      } else {
-        needSetOwnerDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(outputRoot);
-        if (outputFs.exists(needSetOwnerDir)) {
-          needSetOwnerDir = snapshotTmpDir;
-        }
-      }
-    }
-
-    // Check if the snapshot already exists
-    if (outputFs.exists(outputSnapshotDir)) {
-      if (overwrite) {
-        if (!outputFs.delete(outputSnapshotDir, true)) {
-          System.err.println("Unable to remove existing snapshot directory: " + outputSnapshotDir);
-          return 1;
-        }
-      } else {
-        System.err.println("The snapshot '" + targetName +
-          "' already exists in the destination: " + outputSnapshotDir);
-        return 1;
-      }
-    }
-
-    if (!skipTmp) {
-      // Check if the snapshot already in-progress
-      if (outputFs.exists(snapshotTmpDir)) {
-        if (overwrite) {
-          if (!outputFs.delete(snapshotTmpDir, true)) {
-            System.err.println("Unable to remove existing snapshot tmp directory: " + snapshotTmpDir);
-            return 1;
-          }
-        } else {
-          System.err.println("A snapshot with the same name '" + targetName + "' may be in-progress");
-          System.err.println("Please check " + snapshotTmpDir + ". If the snapshot has completed, ");
-          System.err.println("consider removing " + snapshotTmpDir + " by using the -overwrite option");
-          return 1;
-        }
-      }
-    }
-
-    // Step 1 - Copy fs1:/.snapshot/<snapshot> to fs2:/.snapshot/.tmp/<snapshot>
-    // The snapshot references must be copied before the hfiles otherwise the cleaner
-    // will remove them because they are unreferenced.
-    List<Path> travesedPaths = new ArrayList<>();
-    boolean copySucceeded = false;
-    try {
-      LOG.info("Copy Snapshot Manifest from " + snapshotD
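The overwrite handling above is what the tool's -overwrite flag feeds into. Since ExportSnapshot implements org.apache.hadoop.util.Tool, a hedged sketch of driving it programmatically follows; the snapshot name and destination are illustrative, and cluster configuration is assumed to be on the classpath:

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
    import org.apache.hadoop.util.ToolRunner;

    public class ExportSnapshotDriver {
      public static void main(String[] args) throws Exception {
        // -overwrite removes a pre-existing target snapshot (or tmp) directory
        // instead of failing, exactly as the checks above show.
        int rc = ToolRunner.run(HBaseConfiguration.create(), new ExportSnapshot(),
            new String[] {
                "-snapshot", "my_snapshot",
                "-copy-to", "hdfs://backup-cluster:8020/hbase",
                "-overwrite"
            });
        System.exit(rc);
      }
    }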

[30/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html
index 7d49582..01861a7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html
@@ -980,175 +980,177 @@
(Same generated-source hunk as the apidocs ExportSnapshot diff above; identical body omitted.)

[31/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
index 7d49582..01861a7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
@@ -980,175 +980,177 @@
(The first part of this hunk repeats the ExportSnapshot diff above; this copy of the
generated source continues further:)
-    try {
-      LOG.info("Copy Snapshot Manifest from " + snapshotDir + " to " + initialOutputSnapshotDir);
-      travesedPaths =
-          FSUtils.copyFilesParallel(inputFs, snapshotDir, outputFs, initialOutputSnapshotDir, conf,
-            conf.getInt(CONF_COPY_MANIFEST_THREADS, DEFAULT_COPY_MANIFEST_THREADS));
-      copySucceeded = true;
-    } catch (IOException e) {
-      throw new ExportSnapshotException("Failed to copy the snapshot directory: from=" +
-        snapshotDir + " to=" + initialOutputSnapshotDir, e);
-    } finally {
-      if (copySucceeded) {
-        if (filesUser != null || filesGroup != null) {
-          LOG.warn((filesUser == null ? "" : "Change the owner of " + needSetOwnerDir + " to "
-              + filesUser)
-              + (filesGroup == null ? "" : ", Change the group of " + needSetOwnerDir + " to "
-              + filesGroup));
-          setOwnerParallel(outputFs, filesUser, filesGroup, conf, travesedPaths);
-        }
-        if (filesMode > 0) {
-          LOG.warn("Change the permission of " + needSetOwnerDir + " to " + filesMode);
-          setPermissionParallel(outputFs, (short)filesMode, travesedPaths, conf);
-        }
-      }
-    }
-
-    // Write a new .snapshotinfo if the target name is different from the source name
-    if (!targetName.equals(snapshotName)) {
-      SnapshotDesc

[04/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.html
index 826bcba..5323511 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.html
@@ -98,580 +98,623 @@
  private static final String CFB = "b";
  private static final String COLUMN_1 = CFA + ":1";
  private static final String COLUMN_2 = CFB + ":2";
  private static Client client;
  private static int expectedRows1;
  private static int expectedRows2;
  private static Configuration conf;

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final HBaseRESTTestingUtility REST_TEST_UTIL =
      new HBaseRESTTestingUtility();

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    conf.set(Constants.CUSTOM_FILTERS, "CustomFilter:" + CustomFilter.class.getName());
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(conf);
    client = new Client(new Cluster().add("localhost",
        REST_TEST_UTIL.getServletPort()));
    Admin admin = TEST_UTIL.getAdmin();
    if (!admin.tableExists(TABLE)) {
      HTableDescriptor htd = new HTableDescriptor(TABLE);
      htd.addFamily(new HColumnDescriptor(CFA));
      htd.addFamily(new HColumnDescriptor(CFB));
      admin.createTable(htd);
      expectedRows1 = TestScannerResource.insertData(conf, TABLE, COLUMN_1, 1.0);
      expectedRows2 = TestScannerResource.insertData(conf, TABLE, COLUMN_2, 0.5);
    }
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.getAdmin().disableTable(TABLE);
    TEST_UTIL.getAdmin().deleteTable(TABLE);
    REST_TEST_UTIL.shutdownServletContainer();
    TEST_UTIL.shutdownMiniCluster();
  }

  @Test
  public void testSimpleScannerXML() throws IOException, JAXBException, XMLStreamException {
    // Test scanning particular columns
    StringBuilder builder = new StringBuilder();
    builder.append("/*");
    builder.append("?");
    builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
    builder.append("&");
    builder.append(Constants.SCAN_LIMIT + "=10");
    Response response = client.get("/" + TABLE + builder.toString(),
        Constants.MIMETYPE_XML);
    assertEquals(200, response.getCode());
    assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type"));
    JAXBContext ctx = JAXBContext.newInstance(CellSetModel.class);
    Unmarshaller ush = ctx.createUnmarshaller();
    CellSetModel model = (CellSetModel) ush.unmarshal(response.getStream());
    int count = TestScannerResource.countCellSet(model);
    assertEquals(10, count);
    checkRowsNotNull(model);

    // Test with no limit.
    builder = new StringBuilder();
    builder.append("/*");
    builder.append("?");
    builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
    response = client.get("/" + TABLE + builder.toString(),
        Constants.MIMETYPE_XML);
    assertEquals(200, response.getCode());
    assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type"));
    model = (CellSetModel) ush.unmarshal(response.getStream());
    count = TestScannerResource.countCellSet(model);
    assertEquals(expectedRows1, count);
    checkRowsNotNull(model);

    // Test with start and end row.
    builder = new StringBuilder();
    builder.append("/*");
    builder.append("?");
    builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
    builder.append("&");
    builder.append(Constants.SCAN_START_ROW + "=aaa");
    builder.append("&");
    builder.append(Constants.SCAN_END_ROW + "=aay");
    response = client.get("/" + TABLE + builder.toString(),
        Constants.MIMETYPE_XML);
    assertEquals(200, response.getCode());
    model = (CellSetModel) ush.unmarshal(response.getStream());
    count = TestScannerResource.countCellSet(model);
    RowModel startRow = model.getRows().get(0);
    assertEquals("aaa", Bytes.toString(startRow.getKey()));
    RowModel endRow = model.getRows().get(model.getRows().size() - 1);
    assertEquals("aax", Bytes.toString(endRow.getKey()));
    assertEquals(24, count);
    checkRowsNotNul
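The scanner URLs built by this test map onto the REST gateway's stateless scan endpoint: GET /<table>/* with query parameters such as column, limit, startrow and endrow. A hedged sketch of issuing the same request with plain JDK HTTP (host, port and table name are illustrative, and a running REST gateway is assumed):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class RestScanDemo {
      public static void main(String[] args) throws Exception {
        // Mirrors the test's "/<table>/*?column=a:1&limit=10" style URL.
        URL url = new URL("http://localhost:8080/mytable/*?column=a:1&limit=10");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "text/xml");  // as with Constants.MIMETYPE_XML
        try (BufferedReader in =
            new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);  // CellSet XML, as unmarshalled in the test
          }
        }
      }
    }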

[40/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/org/apache/hadoop/hbase/snapshot/SnapshotManifest.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/snapshot/SnapshotManifest.html b/devapidocs/org/apache/hadoop/hbase/snapshot/SnapshotManifest.html
index fca0402..fa25ee7 100644
--- a/devapidocs/org/apache/hadoop/hbase/snapshot/SnapshotManifest.html
+++ b/devapidocs/org/apache/hadoop/hbase/snapshot/SnapshotManifest.html
@@ -167,29 +167,29 @@ (field summary)
 desc
-private org.apache.hadoop.fs.FileSystem fs
 htd
 LOG
 manifestSizeLimit
 monitor
 List<SnapshotRegionManifest> regionManifests
+private org.apache.hadoop.fs.FileSystem rootFs
 SNAPSHOT_MANIFEST_SIZE_LIMIT_CONF_KEY
 workingDir
+private org.apache.hadoop.fs.FileSystem workingDirFs

@@ -216,7 +220,7 @@ (constructor summary)
 private SnapshotManifest(org.apache.hadoop.conf.Configuration conf,
-    org.apache.hadoop.fs.FileSystem fs,
+    org.apache.hadoop.fs.FileSystem rootFs,
     org.apache.hadoop.fs.Path workingDir,
     org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription desc,
     ForeignExceptionSnare monitor)

@@ -492,13 +496,22 @@ (field detail)
 private final org.apache.hadoop.fs.Path workingDir
+rootFs
+private final org.apache.hadoop.fs.FileSystem rootFs
-fs
-private final org.apache.hadoop.fs.FileSystem fs
+workingDirFs
+private final org.apache.hadoop.fs.FileSystem workingDirFs

@@ -507,7 +520,7 @@
 private int manifestSizeLimit (anchor renumbered only)

@@ -524,11 +537,23 @@ (constructor detail)
-private SnapshotManifest(org.apache.hadoop.conf.Configuration conf,
-    org.apache.hadoop.fs.FileSystem fs,
+private SnapshotManifest(org.apache.hadoop.conf.Configuration conf,
+    org.apache.hadoop.fs.FileSystem rootFs,
     org.apache.hadoop.fs.Path workingDir,
     org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription desc,
-    ForeignExceptionSnare monitor)
+    ForeignExceptionSnare monitor)
+    throws java.io.IOException
+
+Parameters:
+  conf - configuration file for HBase setup
+  rootFs - root filesystem containing HFiles
+  workingDir - file path of where the manifest should be located
+  desc - description of snapshot being taken
+  monitor - monitor of foreign exceptions
+Throws:
+  java.io.IOException - if the working directory file system cannot be determined from the config file

@@ -545,11 +570,12 @@ (create)
-public static SnapshotManifest create(org.apache.hadoop.conf.Configuration conf,
+public static SnapshotManifest create(org.apache.hadoop.conf.Configuration conf,
     org.apache.hadoop.fs.FileSystem fs,
     org.apache.hadoop.fs.Path workingDir,
     org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription desc,
-    ForeignExceptionSnare monitor)
+    ForeignExceptionSnare monitor)
+    throws java.io.IOException
 Return a SnapshotManifest instance, used for writing a snapshot.
 There are two usage pattern:

@@ -561,6 +587,10 @@
 (truncated in the archive at "- ")

[44/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.html b/devapidocs/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.html
index d6b1e65..387c5e5 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.html
@@ -158,7 +158,7 @@ (fields inherited from class TakeSnapshotHandler)
-conf, fs, htd, master, metricsSnapshot, monitor, rootDir, snapshot, snapshotManager,
- snapshotManifest, snapshotTable, status, tableLock, workingDir
+conf, htd, master, metricsSnapshot, monitor, rootDir, rootFs, snapshot, snapshotManager,
+ snapshotManifest, snapshotTable, status, tableLock, workingDir, workingDirFs

@@ -220,7 +220,7 @@ (methods inherited from class TakeSnapshotHandler; anchors renumbered only)
 cancel, completeSnapshot, getCompletionTimestamp, getException, getExceptionIfFailed,
 getSnapshot, hasException, isFinished, process, rethrowException,
 rethrowExceptionIfFailed, snapshotDisabledRegion

@@ -273,13 +273,17 @@ (constructor detail)
 DisabledTableSnapshotHandler
-public DisabledTableSnapshotHandler(SnapshotProtos.SnapshotDescription snapshot,
-    MasterServices masterServices,
-    SnapshotManager snapshotManager)
+public DisabledTableSnapshotHandler(SnapshotProtos.SnapshotDescription snapshot,
+    MasterServices masterServices,
+    SnapshotManager snapshotManager)
+    throws IOException

 Parameters:
   snapshot - descriptor of the snapshot to take
   masterServices - master services provider
+Throws:
+  IOException - if it cannot access the filesystem of the snapshot temporary directory

@@ -297,7 +301,7 @@ (prepare; anchor renumbered only)
 public DisabledTableSnapshotHandler prepare() throws Exception
 Description copied from class: EventHandler
 Event handlers should do all the necessary checks in this method (rather than ...)

@@ -321,7 +325,7 @@ (snapshotRegions; anchor renumbered only)
 public void snapshotRegions(List<Pair<RegionInfo, ServerName>> regionsAndLocations)
     throws IOException, org.apache.zookeeper.KeeperException
 Description copied from class: TakeSnapshotHandler

diff --git a/devapidocs/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.html b/devapidocs/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.html
index b735721..4d9e7b1 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.html
@@ -164,7 +164,7 @@ (fields inherited from TakeSnapshotHandler: same fs to rootFs/workingDirFs change as above)
@@ -232,7 +232,7 @@ (methods inherited from TakeSnapshotHandler; anchors renumbered only; the archive copy is truncated at "snapshotDisable")

[08/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.PutThread.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.PutThread.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.PutThread.html
index 7ed37ca..907a45d 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.PutThread.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.PutThread.html
@@ -28,713 +28,752 @@
(Identical removed-lines hunk to the TestAtomicOperation generated-source diff above;
the only additional lines visible in this copy are the teardown body:)
-  @After
-  public void teardown() throws IOException {
-    if (region != null) {
-      BlockCache bc = region.getStores().get(0).getCacheConfig().getBlockCache();
-      re

[17/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.html
index 96212ea..b933a5a 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.html
@@ -18,7 +18,7 @@
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};

@@ -109,7 +109,7 @@
 public class TestAtomicOperation extends java.lang.Object
 Testing of HRegion.incrementColumnValue, HRegion.increment, and HRegion.append
 (declaration unchanged; only the generated anchor id moved)

@@ -310,32 +310,36 @@ (method summary)
+void testAppendWithMultipleFamilies()
 void testAppendWithNonExistingFamily()
 void testIncrementMultiThreads()             Test multi-threaded increments.
 void testIncrementWithNonExistingFamily()
 void testMultiRowMutationMultiThreads()      Test multi-threaded region mutations.
 void testPutAndCheckAndPutInParallel()       Test written as a verifier for HBASE-7051, CheckAndPut should properly read MVCC.
 void testRowMutationMultiThreads()           Test multi-threaded row mutations.

@@ -369,7 +373,7 @@ through @@ -495,7 +499,7 @@
 (These hunks only renumber generated anchors for the unchanged members CLASS_RULE,
 LOG, name, region, TEST_UTIL, tableName, qual1, qual2, qual3, value1, value2, row,
 row2, latch and testStep; the archive copy is truncated at
 "private static volatile TestAtomicOperation.TestS".)
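The new testAppendWithMultipleFamilies entry corresponds to a client-side Append touching more than one column family in a single atomic mutation. A hedged sketch of that shape (family, qualifier and row names are illustrative; the object would be passed to Table.append on a live cluster):

    import org.apache.hadoop.hbase.client.Append;
    import org.apache.hadoop.hbase.util.Bytes;

    public class MultiFamilyAppendDemo {
      public static void main(String[] args) {
        // One Append, two families: both edits apply atomically to the row.
        Append app = new Append(Bytes.toBytes("rowA"));
        app.addColumn(Bytes.toBytes("fam1"), Bytes.toBytes("qual1"), Bytes.toBytes("v1"));
        app.addColumn(Bytes.toBytes("fam2"), Bytes.toBytes("qual2"), Bytes.toBytes("v2"));
        System.out.println(app);  // would be sent via Table.append(app)
      }
    }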

[39/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.html
index 9c02160..80ce868 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.html
@@ -62,72 +62,75 @@
 054  /**
 055   * @param snapshot descriptor of the 
snapshot to take
 056   * @param masterServices master 
services provider
-057   */
-058  public 
DisabledTableSnapshotHandler(SnapshotDescription snapshot,
-059  final MasterServices 
masterServices, final SnapshotManager snapshotManager) {
-060super(snapshot, masterServices, 
snapshotManager);
-061  }
-062
-063  @Override
-064  public DisabledTableSnapshotHandler 
prepare() throws Exception {
-065return (DisabledTableSnapshotHandler) 
super.prepare();
-066  }
-067
-068  // TODO consider parallelizing these 
operations since they are independent. Right now its just
-069  // easier to keep them serial though
-070  @Override
-071  public void 
snapshotRegions(List<Pair<RegionInfo, ServerName>> regionsAndLocations)
-072  throws IOException, KeeperException 
{
-073try {
-074  // 1. get all the regions hosting 
this table.
-075
-076  // extract each pair to separate 
lists
-077  Set<RegionInfo> regions = new HashSet<>();
-078  for (Pair<RegionInfo, ServerName> p : regionsAndLocations) {
-079// Don't include non-default 
regions
-080RegionInfo hri = p.getFirst();
-081if 
(RegionReplicaUtil.isDefaultReplica(hri)) {
-082  regions.add(hri);
-083}
-084  }
-085  // handle the mob files if any.
-086  boolean mobEnabled = 
MobUtils.hasMobColumns(htd);
-087  if (mobEnabled) {
-088// snapshot the mob files as a 
offline region.
-089RegionInfo mobRegionInfo = 
MobUtils.getMobRegionInfo(htd.getTableName());
-090regions.add(mobRegionInfo);
-091  }
-092
-093  // 2. for each region, write all 
the info to disk
-094  String msg = "Starting to write 
region info and WALs for regions for offline snapshot:"
-095  + 
ClientSnapshotDescriptionUtils.toString(snapshot);
-096  LOG.info(msg);
-097  status.setStatus(msg);
-098
-099  ThreadPoolExecutor exec = 
SnapshotManifest.createExecutor(conf, "DisabledTableSnapshot");
-100  try {
-101
ModifyRegionUtils.editRegions(exec, regions, new 
ModifyRegionUtils.RegionEditTask() {
-102  @Override
-103  public void editRegion(final 
RegionInfo regionInfo) throws IOException {
-104
snapshotManifest.addRegion(FSUtils.getTableDir(rootDir, snapshotTable), 
regionInfo);
-105  }
-106});
-107  } finally {
-108exec.shutdown();
-109  }
-110} catch (Exception e) {
-111  // make sure we capture the 
exception to propagate back to the client later
-112  String reason = "Failed snapshot " 
+ ClientSnapshotDescriptionUtils.toString(snapshot)
-113  + " due to exception:" + 
e.getMessage();
-114  ForeignException ee = new 
ForeignException(reason, e);
-115  monitor.receive(ee);
-116  status.abort("Snapshot of table: "+ 
snapshotTable + " failed because " + e.getMessage());
-117} finally {
-118  LOG.debug("Marking snapshot" + 
ClientSnapshotDescriptionUtils.toString(snapshot)
-119  + " as finished.");
-120}
-121  }
-122}
+057   * @throws IOException if it cannot 
access the filesystem of the snapshot
+058   * temporary directory
+059   */
+060  public 
DisabledTableSnapshotHandler(SnapshotDescription snapshot,
+061  final MasterServices 
masterServices, final SnapshotManager snapshotManager)
+062  throws IOException {
+063super(snapshot, masterServices, 
snapshotManager);
+064  }
+065
+066  @Override
+067  public DisabledTableSnapshotHandler 
prepare() throws Exception {
+068return (DisabledTableSnapshotHandler) 
super.prepare();
+069  }
+070
+071  // TODO consider parallelizing these 
operations since they are independent. Right now its just
+072  // easier to keep them serial though
+073  @Override
+074  public void 
snapshotRegions(List<Pair<RegionInfo, ServerName>> regionsAndLocations)
+075  throws IOException, KeeperException 
{
+076try {
+077  // 1. get all the regions hosting 
this table.
+078
+079  // extract each pair to separate 
lists
+080  Set<RegionInfo> regions = new HashSet<>();
+081  for (Pair<RegionInfo, ServerName> p : regionsAndLocations) {
+082// Don't include non-default 
regions
+083RegionInfo hri = p.getFirst();
+084i

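For orientation, a hedged sketch of how a client reaches this handler: snapshotting a table
that has been disabled first (the connection, table name, and snapshot name are invented):

  try (Connection conn = ConnectionFactory.createConnection(conf);
       Admin admin = conn.getAdmin()) {
    TableName tn = TableName.valueOf("demo");   // hypothetical table
    admin.disableTable(tn);                     // a disabled table takes the offline path
    admin.snapshot("demo-snapshot", tn);        // served by DisabledTableSnapshotHandler
  }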
[07/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.TestStep.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.TestStep.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.TestStep.html
index 7ed37ca..907a45d 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.TestStep.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.TestStep.html
@@ -28,713 +28,752 @@
 020import static 
org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
 021import static 
org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
 022import static 
org.junit.Assert.assertEquals;
-023import static 
org.junit.Assert.assertTrue;
-024import static org.junit.Assert.fail;
-025
-026import java.io.IOException;
-027import java.util.ArrayList;
-028import java.util.Arrays;
-029import java.util.List;
-030import java.util.Objects;
-031import java.util.Random;
-032import 
java.util.concurrent.CountDownLatch;
-033import 
java.util.concurrent.atomic.AtomicInteger;
-034import 
java.util.concurrent.atomic.AtomicLong;
-035import 
org.apache.hadoop.conf.Configuration;
-036import org.apache.hadoop.fs.FileSystem;
-037import org.apache.hadoop.fs.Path;
-038import org.apache.hadoop.hbase.Cell;
-039import 
org.apache.hadoop.hbase.CellUtil;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-042import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-043import 
org.apache.hadoop.hbase.HColumnDescriptor;
-044import 
org.apache.hadoop.hbase.HConstants;
-045import 
org.apache.hadoop.hbase.HRegionInfo;
-046import 
org.apache.hadoop.hbase.HTableDescriptor;
-047import 
org.apache.hadoop.hbase.MultithreadedTestUtil;
-048import 
org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
-049import 
org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
-050import 
org.apache.hadoop.hbase.TableName;
-051import 
org.apache.hadoop.hbase.client.Append;
-052import 
org.apache.hadoop.hbase.client.Delete;
-053import 
org.apache.hadoop.hbase.client.Durability;
-054import 
org.apache.hadoop.hbase.client.Get;
-055import 
org.apache.hadoop.hbase.client.Increment;
-056import 
org.apache.hadoop.hbase.client.IsolationLevel;
-057import 
org.apache.hadoop.hbase.client.Mutation;
-058import 
org.apache.hadoop.hbase.client.Put;
-059import 
org.apache.hadoop.hbase.client.RegionInfo;
-060import 
org.apache.hadoop.hbase.client.Result;
-061import 
org.apache.hadoop.hbase.client.RowMutations;
-062import 
org.apache.hadoop.hbase.client.Scan;
-063import 
org.apache.hadoop.hbase.client.TableDescriptor;
-064import 
org.apache.hadoop.hbase.filter.BinaryComparator;
-065import 
org.apache.hadoop.hbase.io.HeapSize;
-066import 
org.apache.hadoop.hbase.io.hfile.BlockCache;
-067import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-068import 
org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
-069import 
org.apache.hadoop.hbase.util.Bytes;
-070import org.apache.hadoop.hbase.wal.WAL;
-071import org.junit.After;
-072import org.junit.Before;
-073import org.junit.ClassRule;
-074import org.junit.Rule;
-075import org.junit.Test;
-076import 
org.junit.experimental.categories.Category;
-077import org.junit.rules.TestName;
-078import org.slf4j.Logger;
-079import org.slf4j.LoggerFactory;
-080
-081/**
-082 * Testing of 
HRegion.incrementColumnValue, HRegion.increment,
-083 * and HRegion.append
-084 */
-085@Category({VerySlowRegionServerTests.class, 
MediumTests.class}) // Starts 100 threads
-086public class TestAtomicOperation {
-087
-088  @ClassRule
-089  public static final HBaseClassTestRule 
CLASS_RULE =
-090  
HBaseClassTestRule.forClass(TestAtomicOperation.class);
-091
-092  private static final Logger LOG = 
LoggerFactory.getLogger(TestAtomicOperation.class);
-093  @Rule public TestName name = new 
TestName();
-094
-095  HRegion region = null;
-096  private HBaseTestingUtility TEST_UTIL = 
HBaseTestingUtility.createLocalHTU();
-097
-098  // Test names
-099  static  byte[] tableName;
-100  static final byte[] qual1 = 
Bytes.toBytes("qual1");
-101  static final byte[] qual2 = 
Bytes.toBytes("qual2");
-102  static final byte[] qual3 = 
Bytes.toBytes("qual3");
-103  static final byte[] value1 = 
Bytes.toBytes("value1");
-104  static final byte[] value2 = 
Bytes.toBytes("value2");
-105  static final byte [] row = 
Bytes.toBytes("rowA");
-106  static final byte [] row2 = 
Bytes.toBytes("rowB");
-107
-108  @Before
-109  public void setup() {
-110tableName = 
Bytes.toBytes(name.getMethodName());
-111  }
-112
-113  @After
-114  public void teardown() throws 
IOException {
-115if (region != null) {
-116  BlockCache bc = 
region.getStores().get(0).getCacheConfig().getBlockCache();
-117  region.

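The class javadoc above names HRegion.incrementColumnValue, HRegion.increment, and
HRegion.append as the operations under test; a minimal sketch of the equivalent client
calls, reusing the test's field names (the connection and table name are assumptions):

  try (Table table = conn.getTable(TableName.valueOf("demo"))) {    // conn: an open Connection
    table.increment(new Increment(row).addColumn(fam1, qual1, 1L)); // atomic counter bump
    table.append(new Append(row).addColumn(fam1, qual2, value2));   // atomic append
  }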
hbase-site git commit: INFRA-10751 Empty commit

2018-09-12 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 705d69c41 -> f1997f363


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/f1997f36
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/f1997f36
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/f1997f36

Branch: refs/heads/asf-site
Commit: f1997f363eb32e25209a9aefba674b7688ecb516
Parents: 705d69c
Author: jenkins 
Authored: Wed Sep 12 14:54:07 2018 +
Committer: jenkins 
Committed: Wed Sep 12 14:54:07 2018 +

--

--




[41/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/org/apache/hadoop/hbase/rest/TableResource.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/rest/TableResource.html 
b/devapidocs/org/apache/hadoop/hbase/rest/TableResource.html
index 6cc0dd7..5bc0f61 100644
--- a/devapidocs/org/apache/hadoop/hbase/rest/TableResource.html
+++ b/devapidocs/org/apache/hadoop/hbase/rest/TableResource.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";

 @InterfaceAudience.Private
 public class TableResource
 extends ResourceBase

Member detail (this diff changes only generated link anchors; the declarations themselves
are unchanged):

 table
 String table

 LOG
 private static final org.slf4j.Logger LOG

 TableResource
 public TableResource(String table)
   throws IOException
 Constructor

 getName
 String getName()
 Returns: the table name

 exists
 boolean exists()
   throws IOException

 getExistsResource
 @Path(value="exists")
 public ExistsResource getExistsResource()
   throws IOException

 getRegionsResource
 @Path(value="regions")
 public RegionsResource getRegionsResource()
   throws IOException

 getScannerResource
 @Path(value="scanner")
 public ScannerResource getScannerResource()
   throws IOException

 getSchemaResource
 @Path(value="schema")
 public SchemaResource getSchemaResource()
   throws IOException

 getMultipleRowResource
 @Path(value="{multiget: multiget.*}")
 public MultiRowResource getMultipleRowResource(@QueryParam(value="v") String versions,
   @PathParam(value="multiget") String path)

 getRowResource
 @Path(value="{rowspec: [^*]+}")
 public RowResource getRowResource(@PathParam(value="rowspec") @Encoded String rowspec,

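A hedged illustration of exercising those @Path sub-resources with the hbase-rest client
classes used by the REST tests later in this digest (host, port, and table name are made up):

  Client client = new Client(new Cluster().add("localhost", 8080));
  Response schema  = client.get("/demo/schema",  Constants.MIMETYPE_XML); // getSchemaResource()
  Response exists  = client.get("/demo/exists",  Constants.MIMETYPE_XML); // exists()
  Response regions = client.get("/demo/regions", Constants.MIMETYPE_XML); // getRegionsResource()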
[02/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.html 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.html
index 826bcba..5323511 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.html
@@ -98,580 +98,623 @@
 090  private static final String CFB = 
"b";
 091  private static final String COLUMN_1 = 
CFA + ":1";
 092  private static final String COLUMN_2 = 
CFB + ":2";
-093  private static Client client;
-094  private static int expectedRows1;
-095  private static int expectedRows2;
-096  private static Configuration conf;
-097
-098  private static final 
HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-099  private static final 
HBaseRESTTestingUtility REST_TEST_UTIL =
-100new HBaseRESTTestingUtility();
-101
-102  @BeforeClass
-103  public static void setUpBeforeClass() 
throws Exception {
-104conf = 
TEST_UTIL.getConfiguration();
-105conf.set(Constants.CUSTOM_FILTERS, 
"CustomFilter:" + CustomFilter.class.getName());
-106TEST_UTIL.startMiniCluster();
-107
REST_TEST_UTIL.startServletContainer(conf);
-108client = new Client(new 
Cluster().add("localhost",
-109  
REST_TEST_UTIL.getServletPort()));
-110Admin admin = TEST_UTIL.getAdmin();
-111if (!admin.tableExists(TABLE)) {
-112HTableDescriptor htd = new 
HTableDescriptor(TABLE);
-113htd.addFamily(new 
HColumnDescriptor(CFA));
-114htd.addFamily(new 
HColumnDescriptor(CFB));
-115admin.createTable(htd);
-116expectedRows1 = 
TestScannerResource.insertData(conf, TABLE, COLUMN_1, 1.0);
-117expectedRows2 = 
TestScannerResource.insertData(conf, TABLE, COLUMN_2, 0.5);
-118}
-119  }
-120
-121  @AfterClass
-122  public static void tearDownAfterClass() 
throws Exception {
-123
TEST_UTIL.getAdmin().disableTable(TABLE);
-124
TEST_UTIL.getAdmin().deleteTable(TABLE);
-125
REST_TEST_UTIL.shutdownServletContainer();
-126TEST_UTIL.shutdownMiniCluster();
-127  }
-128
-129  @Test
-130  public void testSimpleScannerXML() 
throws IOException, JAXBException, XMLStreamException {
-131// Test scanning particular columns
-132StringBuilder builder = new 
StringBuilder();
-133builder.append("/*");
-134builder.append("?");
-135builder.append(Constants.SCAN_COLUMN 
+ "=" + COLUMN_1);
-136builder.append("&");
-137builder.append(Constants.SCAN_LIMIT + 
"=10");
-138Response response = client.get("/" + 
TABLE + builder.toString(),
-139  Constants.MIMETYPE_XML);
-140assertEquals(200, 
response.getCode());
-141assertEquals(Constants.MIMETYPE_XML, 
response.getHeader("content-type"));
-142JAXBContext ctx = 
JAXBContext.newInstance(CellSetModel.class);
-143Unmarshaller ush = 
ctx.createUnmarshaller();
-144CellSetModel model = (CellSetModel) 
ush.unmarshal(response.getStream());
-145int count = 
TestScannerResource.countCellSet(model);
-146assertEquals(10, count);
-147checkRowsNotNull(model);
-148
-149//Test with no limit.
-150builder = new StringBuilder();
-151builder.append("/*");
-152builder.append("?");
-153builder.append(Constants.SCAN_COLUMN 
+ "=" + COLUMN_1);
-154response = client.get("/" + TABLE + 
builder.toString(),
-155  Constants.MIMETYPE_XML);
-156assertEquals(200, 
response.getCode());
-157assertEquals(Constants.MIMETYPE_XML, 
response.getHeader("content-type"));
-158model = (CellSetModel) 
ush.unmarshal(response.getStream());
-159count = 
TestScannerResource.countCellSet(model);
-160assertEquals(expectedRows1, count);
-161checkRowsNotNull(model);
-162
-163//Test with start and end row.
-164builder = new StringBuilder();
-165builder.append("/*");
-166builder.append("?");
-167builder.append(Constants.SCAN_COLUMN 
+ "=" + COLUMN_1);
-168builder.append("&");
-169
builder.append(Constants.SCAN_START_ROW + "=aaa");
-170builder.append("&");
-171builder.append(Constants.SCAN_END_ROW 
+ "=aay");
-172response = client.get("/" + TABLE + 
builder.toString(),
-173  Constants.MIMETYPE_XML);
-174assertEquals(200, 
response.getCode());
-175model = (CellSetModel) 
ush.unmarshal(response.getStream());
-176count = 
TestScannerResource.countCellSet(model);
-177RowModel startRow = 
model.getRows().get(0);
-178assertEquals("aaa", 
Bytes.toString(startRow.getKey()));
-179RowModel endRow = 
model.getRows().get(model.getRows().size() - 1);
-180assertEquals("aax", 
Bytes.toString(endRow.getKey()));
-181assertEquals(24, count);
-182checkRowsNotNull(model);
-183
-184//Test with start row and limit.
-185builder = new StringBuilder();
-186builder.append(

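Outside the test harness, the same scan can be issued with a plain query string; a hedged
sketch, assuming Constants.SCAN_COLUMN, SCAN_LIMIT, SCAN_START_ROW, and SCAN_END_ROW resolve
to column, limit, startrow, and endrow (client as constructed in setUpBeforeClass above):

  // Mirrors the URL testSimpleScannerXML builds: scan column a:1, at most 10 rows
  Response response = client.get("/test/*?column=a:1&limit=10", Constants.MIMETYPE_XML);
  // adding &startrow=aaa&endrow=aay bounds the scan, as in the start/end-row case above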
[49/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/705d69c4
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/705d69c4
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/705d69c4

Branch: refs/heads/asf-site
Commit: 705d69c414b36c9f727d82caa1315bed0ea76b07
Parents: 8ea88a7
Author: jenkins 
Authored: Wed Sep 12 14:53:40 2018 +
Committer: jenkins 
Committed: Wed Sep 12 14:53:40 2018 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 29812 +
 .../hadoop/hbase/snapshot/ExportSnapshot.html   | 6 +-
 .../hadoop/hbase/snapshot/ExportSnapshot.html   |   340 +-
 book.html   |17 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 15924 +
 checkstyle.rss  |64 +-
 coc.html| 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/constant-values.html |13 +-
 devapidocs/index-all.html   |68 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../hadoop/hbase/client/package-tree.html   |24 +-
 .../class-use/ForeignExceptionSnare.html| 2 +-
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/package-tree.html   | 8 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 4 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 2 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 4 +-
 .../hbase/master/class-use/MasterServices.html  | 4 +-
 .../hadoop/hbase/master/package-tree.html   | 4 +-
 .../hbase/master/procedure/package-tree.html| 2 +-
 .../snapshot/DisabledTableSnapshotHandler.html  |16 +-
 .../snapshot/EnabledTableSnapshotHandler.html   |11 +-
 .../master/snapshot/MasterSnapshotVerifier.html |49 +-
 .../hbase/master/snapshot/SnapshotManager.html  |70 +-
 .../master/snapshot/TakeSnapshotHandler.html|   136 +-
 .../org/apache/hadoop/hbase/package-tree.html   |16 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 4 +-
 .../store/wal/WALProcedureStore.PushType.html   | 8 +-
 .../procedure2/store/wal/WALProcedureStore.html |   193 +-
 .../hadoop/hbase/quotas/package-tree.html   |10 +-
 .../hadoop/hbase/regionserver/package-tree.html |14 +-
 .../hbase/regionserver/wal/package-tree.html| 2 +-
 .../hadoop/hbase/replication/package-tree.html  | 2 +-
 .../apache/hadoop/hbase/rest/TableResource.html |30 +-
 .../hadoop/hbase/rest/model/package-tree.html   | 2 +-
 .../hbase/security/access/package-tree.html | 2 +-
 .../hadoop/hbase/security/package-tree.html | 4 +-
 .../hadoop/hbase/snapshot/ExportSnapshot.html   | 6 +-
 .../snapshot/SnapshotDescriptionUtils.html  |   219 +-
 .../SnapshotManifest.RegionVisitor.html |12 +-
 .../hadoop/hbase/snapshot/SnapshotManifest.html |   116 +-
 .../SnapshotManifestV1.ManifestBuilder.html |44 +-
 .../hbase/snapshot/SnapshotManifestV1.html  | 6 +-
 .../SnapshotManifestV2.ManifestBuilder.html |22 +-
 .../hbase/snapshot/SnapshotManifestV2.html  | 6 +-
 .../class-use/HBaseSnapshotException.html   |19 -
 .../class-use/SnapshotCreationException.html| 7 +-
 .../apache/hadoop/hbase/util/package-tree.html  | 8 +-
 .../org/apache/hadoop/hbase/Version.html| 6 +-
 .../snapshot/DisabledTableSnapshotHandler.html  |   135 +-
 .../snapshot/EnabledTableSnapshotHandler.html   | 2 +-
 .../master/snapshot/MasterSnapshotVerifier.html |   307 +-
 .../hbase/master/snapshot/SnapshotManager.html  |  1450 +-
 .../master/snapshot/TakeSnapshotHandler.html|   621 +-
 .../wal/WALProcedureStore.LeaseRecovery.html|  2300 +-
 .../store/wal/WALProcedureStore.PushType.html   |  2300 +-
 .../wal/WALProcedureStore.SyncMetrics.html  |  2300 +-
 .../procedure2/store/wal/WALProcedureStore.html |  2300 +-
 .../HRegion.BatchOperation.Visitor.html | 2 +-
 .../regionserver/HRegion.BatchOperation.html| 2 +-
 .../regionserver/HRegion.BulkLoadListener.html  | 2 +-
 .../HRegion.FlushResult.Result.html | 2 +-
 .../hbase/regionserver/HRegion.FlushResult.html | 2 +-
 .../regionserver/HRegion.FlushResultImpl.html   | 2 +-
 .../HRegion.MutationBatchOperation.html | 2 +-
 .../HRegion.ObservedExceptionsInBatch.html  | 2 +-
 .../HRegion.PrepareFlushResult

[06/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.html
index 7ed37ca..907a45d 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.html
@@ -28,713 +28,752 @@
 020import static 
org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
 021import static 
org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
 022import static 
org.junit.Assert.assertEquals;
-023import static 
org.junit.Assert.assertTrue;
-024import static org.junit.Assert.fail;
-025
-026import java.io.IOException;
-027import java.util.ArrayList;
-028import java.util.Arrays;
-029import java.util.List;
-030import java.util.Objects;
-031import java.util.Random;
-032import 
java.util.concurrent.CountDownLatch;
-033import 
java.util.concurrent.atomic.AtomicInteger;
-034import 
java.util.concurrent.atomic.AtomicLong;
-035import 
org.apache.hadoop.conf.Configuration;
-036import org.apache.hadoop.fs.FileSystem;
-037import org.apache.hadoop.fs.Path;
-038import org.apache.hadoop.hbase.Cell;
-039import 
org.apache.hadoop.hbase.CellUtil;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-042import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-043import 
org.apache.hadoop.hbase.HColumnDescriptor;
-044import 
org.apache.hadoop.hbase.HConstants;
-045import 
org.apache.hadoop.hbase.HRegionInfo;
-046import 
org.apache.hadoop.hbase.HTableDescriptor;
-047import 
org.apache.hadoop.hbase.MultithreadedTestUtil;
-048import 
org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
-049import 
org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
-050import 
org.apache.hadoop.hbase.TableName;
-051import 
org.apache.hadoop.hbase.client.Append;
-052import 
org.apache.hadoop.hbase.client.Delete;
-053import 
org.apache.hadoop.hbase.client.Durability;
-054import 
org.apache.hadoop.hbase.client.Get;
-055import 
org.apache.hadoop.hbase.client.Increment;
-056import 
org.apache.hadoop.hbase.client.IsolationLevel;
-057import 
org.apache.hadoop.hbase.client.Mutation;
-058import 
org.apache.hadoop.hbase.client.Put;
-059import 
org.apache.hadoop.hbase.client.RegionInfo;
-060import 
org.apache.hadoop.hbase.client.Result;
-061import 
org.apache.hadoop.hbase.client.RowMutations;
-062import 
org.apache.hadoop.hbase.client.Scan;
-063import 
org.apache.hadoop.hbase.client.TableDescriptor;
-064import 
org.apache.hadoop.hbase.filter.BinaryComparator;
-065import 
org.apache.hadoop.hbase.io.HeapSize;
-066import 
org.apache.hadoop.hbase.io.hfile.BlockCache;
-067import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-068import 
org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
-069import 
org.apache.hadoop.hbase.util.Bytes;
-070import org.apache.hadoop.hbase.wal.WAL;
-071import org.junit.After;
-072import org.junit.Before;
-073import org.junit.ClassRule;
-074import org.junit.Rule;
-075import org.junit.Test;
-076import 
org.junit.experimental.categories.Category;
-077import org.junit.rules.TestName;
-078import org.slf4j.Logger;
-079import org.slf4j.LoggerFactory;
-080
-081/**
-082 * Testing of 
HRegion.incrementColumnValue, HRegion.increment,
-083 * and HRegion.append
-084 */
-085@Category({VerySlowRegionServerTests.class, 
MediumTests.class}) // Starts 100 threads
-086public class TestAtomicOperation {
-087
-088  @ClassRule
-089  public static final HBaseClassTestRule 
CLASS_RULE =
-090  
HBaseClassTestRule.forClass(TestAtomicOperation.class);
-091
-092  private static final Logger LOG = 
LoggerFactory.getLogger(TestAtomicOperation.class);
-093  @Rule public TestName name = new 
TestName();
-094
-095  HRegion region = null;
-096  private HBaseTestingUtility TEST_UTIL = 
HBaseTestingUtility.createLocalHTU();
-097
-098  // Test names
-099  static  byte[] tableName;
-100  static final byte[] qual1 = 
Bytes.toBytes("qual1");
-101  static final byte[] qual2 = 
Bytes.toBytes("qual2");
-102  static final byte[] qual3 = 
Bytes.toBytes("qual3");
-103  static final byte[] value1 = 
Bytes.toBytes("value1");
-104  static final byte[] value2 = 
Bytes.toBytes("value2");
-105  static final byte [] row = 
Bytes.toBytes("rowA");
-106  static final byte [] row2 = 
Bytes.toBytes("rowB");
-107
-108  @Before
-109  public void setup() {
-110tableName = 
Bytes.toBytes(name.getMethodName());
-111  }
-112
-113  @After
-114  public void teardown() throws 
IOException {
-115if (region != null) {
-116  BlockCache bc = 
region.getStores().get(0).getCacheConfig().getBlockCache();
-117  region.close();
-118  WAL wal = region.getWAL();

[26/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.html
index 7462d5b..63a00a7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.html
@@ -122,294 +122,339 @@
 114  /** Temporary directory under the 
snapshot directory to store in-progress snapshots */
 115  public static final String 
SNAPSHOT_TMP_DIR_NAME = ".tmp";
 116
-117  /** This tag will be created in 
in-progess snapshots */
-118  public static final String 
SNAPSHOT_IN_PROGRESS = ".inprogress";
-119  // snapshot operation values
-120  /** Default value if no start time is 
specified */
-121  public static final long 
NO_SNAPSHOT_START_TIME_SPECIFIED = 0;
+117  /**
+118   * The configuration property that 
determines the filepath of the snapshot
+119   * base working directory
+120   */
+121  public static final String 
SNAPSHOT_WORKING_DIR = "hbase.snapshot.working.dir";
 122
-123
-124  public static final String 
MASTER_SNAPSHOT_TIMEOUT_MILLIS = "hbase.snapshot.master.timeout.millis";
-125
-126  /** By default, wait 300 seconds for a 
snapshot to complete */
-127  public static final long 
DEFAULT_MAX_WAIT_TIME = 60000 * 5;
+123  /** This tag will be created in 
in-progess snapshots */
+124  public static final String 
SNAPSHOT_IN_PROGRESS = ".inprogress";
+125  // snapshot operation values
+126  /** Default value if no start time is 
specified */
+127  public static final long 
NO_SNAPSHOT_START_TIME_SPECIFIED = 0;
 128
 129
-130  /**
-131   * By default, check to see if the 
snapshot is complete (ms)
-132   * @deprecated Use {@link 
#DEFAULT_MAX_WAIT_TIME} instead.
-133   * */
-134  @Deprecated
-135  public static final int 
SNAPSHOT_TIMEOUT_MILLIS_DEFAULT = 60000 * 5;
-136
-137  /**
-138   * Conf key for # of ms elapsed before 
injecting a snapshot timeout error when waiting for
-139   * completion.
-140   * @deprecated Use {@link 
#MASTER_SNAPSHOT_TIMEOUT_MILLIS} instead.
-141   */
-142  @Deprecated
-143  public static final String 
SNAPSHOT_TIMEOUT_MILLIS_KEY = "hbase.snapshot.master.timeoutMillis";
-144
-145  private SnapshotDescriptionUtils() {
-146// private constructor for utility 
class
-147  }
-148
-149  /**
-150   * @param conf {@link Configuration} 
from which to check for the timeout
-151   * @param type type of snapshot being 
taken
-152   * @param defaultMaxWaitTime Default 
amount of time to wait, if none is in the configuration
-153   * @return the max amount of time the 
master should wait for a snapshot to complete
-154   */
-155  public static long 
getMaxMasterTimeout(Configuration conf, SnapshotDescription.Type type,
-156  long defaultMaxWaitTime) {
-157String confKey;
-158switch (type) {
-159case DISABLED:
-160default:
-161  confKey = 
MASTER_SNAPSHOT_TIMEOUT_MILLIS;
-162}
-163return Math.max(conf.getLong(confKey, 
defaultMaxWaitTime),
-164
conf.getLong(SNAPSHOT_TIMEOUT_MILLIS_KEY, defaultMaxWaitTime));
-165  }
-166
-167  /**
-168   * Get the snapshot root directory. All 
the snapshots are kept under this directory, i.e.
-169   * ${hbase.rootdir}/.snapshot
-170   * @param rootDir hbase root 
directory
-171   * @return the base directory in which 
all snapshots are kept
-172   */
-173  public static Path 
getSnapshotRootDir(final Path rootDir) {
-174return new Path(rootDir, 
HConstants.SNAPSHOT_DIR_NAME);
-175  }
-176
-177  /**
-178   * Get the directory for a specified 
snapshot. This directory is a sub-directory of snapshot root
-179   * directory and all the data files for 
a snapshot are kept under this directory.
-180   * @param snapshot snapshot being 
taken
-181   * @param rootDir hbase root 
directory
-182   * @return the final directory for the 
completed snapshot
-183   */
-184  public static Path 
getCompletedSnapshotDir(final SnapshotDescription snapshot, final Path rootDir) 
{
-185return 
getCompletedSnapshotDir(snapshot.getName(), rootDir);
-186  }
-187
-188  /**
-189   * Get the directory for a completed 
snapshot. This directory is a sub-directory of snapshot root
-190   * directory and all the data files for 
a snapshot are kept under this directory.
-191   * @param snapshotName name of the 
snapshot being taken
-192   * @param rootDir hbase root 
directory
-193   * @return the final directory for the 
completed snapshot
-194   */
-195  public static Path 
getCompletedSnapshotDir(final String snapshotName, final Path rootDir) {
-196return 
getCompletedSnapshotDir(getSnapshotsDir(rootDir), snapshotName);
-197  }
-198
-199  /**
-200   * Get the general working directory 
for snapshots

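The notable addition in this hunk is the hbase.snapshot.working.dir property; a minimal
sketch of overriding it so in-progress snapshots are staged on a different filesystem
(the file:/// path is an arbitrary example):

  Configuration conf = HBaseConfiguration.create();
  // SNAPSHOT_WORKING_DIR, introduced above: where in-progress snapshot data is written
  conf.set("hbase.snapshot.working.dir", "file:///tmp/hbase-snapshot-staging");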
[15/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestSnapshotTemporaryDirectory.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestSnapshotTemporaryDirectory.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestSnapshotTemporaryDirectory.html
new file mode 100644
index 000..149326d
--- /dev/null
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestSnapshotTemporaryDirectory.html
@@ -0,0 +1,543 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+Source code
+
+
+
+
+001/**
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 *     http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or agreed to in writing, software
+013 * distributed under the License is distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+015 * See the License for the specific language governing permissions and
+016 * limitations under the License.
+017 */
+018package org.apache.hadoop.hbase.client;
+019
+020import static org.junit.Assert.assertEquals;
+021import static org.junit.Assert.assertFalse;
+022import static org.junit.Assert.assertTrue;
+023
+024import java.io.File;
+025import java.io.IOException;
+026import java.nio.file.Paths;
+027import java.util.ArrayList;
+028import java.util.Arrays;
+029import java.util.Iterator;
+030import java.util.List;
+031import java.util.UUID;
+032import org.apache.commons.io.FileUtils;
+033import org.apache.hadoop.conf.Configuration;
+034import org.apache.hadoop.fs.FileSystem;
+035import org.apache.hadoop.fs.Path;
+036import org.apache.hadoop.hbase.HBaseClassTestRule;
+037import org.apache.hadoop.hbase.HBaseTestingUtility;
+038import org.apache.hadoop.hbase.HConstants;
+039import org.apache.hadoop.hbase.HTableDescriptor;
+040import org.apache.hadoop.hbase.TableName;
+041import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
+042import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
+043import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
+044import org.apache.hadoop.hbase.snapshot.SnapshotDoesNotExistException;
+045import org.apache.hadoop.hbase.snapshot.SnapshotManifestV1;
+046import org.apache.hadoop.hbase.snapshot.SnapshotManifestV2;
+047import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
+048import org.apache.hadoop.hbase.testclassification.LargeTests;
+049import org.apache.hadoop.hbase.util.Bytes;
+050import org.apache.hadoop.hbase.util.FSUtils;
+051import org.junit.After;
+052import org.junit.AfterClass;
+053import org.junit.Before;
+054import org.junit.BeforeClass;
+055import org.junit.ClassRule;
+056import org.junit.Test;
+057import org.junit.experimental.categories.Category;
+058import org.junit.runner.RunWith;
+059import org.junit.runners.Parameterized;
+060import org.slf4j.Logger;
+061import org.slf4j.LoggerFactory;
+062
+063import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+064import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+065
+066/**
+067 * This class tests that the use of a temporary snapshot directory supports snapshot functionality
+068 * while the temporary directory is on a different file system than the root directory
+069 *
+070 * This is an end-to-end test for the snapshot utility
+071 */
+072@Category(LargeTests.class)
+073@RunWith(Parameterized.class)
+074public class TestSnapshotTemporaryDirectory {
+075
+076  @ClassRule public static final HBaseClassTestRule CLASS_RULE =
+077      HBaseClassTestRule.forClass(TestSnapshotTemporaryDirectory.class);
+078
+079  @Parameterized.Parameters public static Iterable data() {
+080    return Arrays
+081        .asList(SnapshotManifestV1.DESCRIPTOR_VERSION, SnapshotManifestV2.DESCRIPTOR_VERSION);
+082  }
+083
+084  @Parameterized.Parameter public int manifestVersion;
+085
+086  private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotTemporaryDirectory.class);
+087  protected static final int NUM_RS = 2;
+088  protected static String TEMP_DIR =
+089      Paths.get("").toAbsolutePath().toString() + Path.SEPARATOR + UUID.randomUUID().toString();
+090
+091  protected static Admin admin;
+092  protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+093  protected static final String STRING_TABLE_NAME = "test";
+094  protected static final byte[] TEST_FAM = Bytes.toByt
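In the spirit of this test, a hedged end-to-end sketch: stage the snapshot under the local
TEMP_DIR declared above while the root directory stays on the cluster filesystem (the
snapshot name is invented; admin, UTIL, and STRING_TABLE_NAME are as in the test):

  UTIL.getConfiguration().set(SnapshotDescriptionUtils.SNAPSHOT_WORKING_DIR,
      "file://" + TEMP_DIR);                  // a different filesystem than the root dir
  admin.snapshot("demo-snapshot", TableName.valueOf(STRING_TABLE_NAME));
  admin.deleteSnapshot("demo-snapshot");      // clean up once the snapshot is verified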


[28/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
index 7d49582..01861a7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
@@ -980,175 +980,177 @@
 972FileSystem outputFs = 
FileSystem.get(outputRoot.toUri(), destConf);
 973LOG.debug("outputFs=" + 
outputFs.getUri().toString() + " outputRoot=" + outputRoot.toString());
 974
-975boolean skipTmp = 
conf.getBoolean(CONF_SKIP_TMP, false);
-976
-977Path snapshotDir = 
SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, inputRoot);
-978Path snapshotTmpDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(targetName, outputRoot);
-979Path outputSnapshotDir = 
SnapshotDescriptionUtils.getCompletedSnapshotDir(targetName, outputRoot);
-980Path initialOutputSnapshotDir = 
skipTmp ? outputSnapshotDir : snapshotTmpDir;
-981
-982// Find the necessary directory which 
need to change owner and group
-983Path needSetOwnerDir = 
SnapshotDescriptionUtils.getSnapshotRootDir(outputRoot);
-984if (outputFs.exists(needSetOwnerDir)) 
{
-985  if (skipTmp) {
-986needSetOwnerDir = 
outputSnapshotDir;
-987  } else {
-988needSetOwnerDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(outputRoot);
-989if 
(outputFs.exists(needSetOwnerDir)) {
-990  needSetOwnerDir = 
snapshotTmpDir;
-991}
-992  }
-993}
-994
-995// Check if the snapshot already 
exists
-996if 
(outputFs.exists(outputSnapshotDir)) {
-997  if (overwrite) {
-998if 
(!outputFs.delete(outputSnapshotDir, true)) {
-999  System.err.println("Unable to 
remove existing snapshot directory: " + outputSnapshotDir);
-1000  return 1;
-1001}
-1002  } else {
-1003System.err.println("The snapshot 
'" + targetName +
-1004  "' already exists in the 
destination: " + outputSnapshotDir);
-1005return 1;
-1006  }
-1007}
-1008
-1009if (!skipTmp) {
-1010  // Check if the snapshot already 
in-progress
-1011  if 
(outputFs.exists(snapshotTmpDir)) {
-1012if (overwrite) {
-1013  if 
(!outputFs.delete(snapshotTmpDir, true)) {
-1014System.err.println("Unable 
to remove existing snapshot tmp directory: "+snapshotTmpDir);
-1015return 1;
-1016  }
-1017} else {
-1018  System.err.println("A snapshot 
with the same name '"+ targetName +"' may be in-progress");
-1019  System.err.println("Please 
check "+snapshotTmpDir+". If the snapshot has completed, ");
-1020  System.err.println("consider 
removing "+snapshotTmpDir+" by using the -overwrite option");
-1021  return 1;
-1022}
-1023  }
-1024}
-1025
-1026// Step 1 - Copy 
fs1:/.snapshot/<snapshot> to fs2:/.snapshot/.tmp/<snapshot>
-1027// The snapshot references must be 
copied before the hfiles otherwise the cleaner
-1028// will remove them because they are 
unreferenced.
-1029List<Path> travesedPaths = new ArrayList<>();
-1030boolean copySucceeded = false;
-1031try {
-1032  LOG.info("Copy Snapshot Manifest 
from " + snapshotDir + " to " + initialOutputSnapshotDir);
-1033  travesedPaths =
-1034  
FSUtils.copyFilesParallel(inputFs, snapshotDir, outputFs, 
initialOutputSnapshotDir, conf,
-1035  
conf.getInt(CONF_COPY_MANIFEST_THREADS, DEFAULT_COPY_MANIFEST_THREADS));
-1036  copySucceeded = true;
-1037} catch (IOException e) {
-1038  throw new 
ExportSnapshotException("Failed to copy the snapshot directory: from=" +
-1039snapshotDir + " to=" + 
initialOutputSnapshotDir, e);
-1040} finally {
-1041  if (copySucceeded) {
-1042if (filesUser != null || 
filesGroup != null) {
-1043  LOG.warn((filesUser == null ? 
"" : "Change the owner of " + needSetOwnerDir + " to "
-1044  + filesUser)
-1045  + (filesGroup == null ? "" 
: ", Change the group of " + needSetOwnerDir + " to "
-1046  + filesGroup));
-1047  setOwnerParallel(outputFs, 
filesUser, filesGroup, conf, travesedPaths);
-1048}
-1049if (filesMode > 0) {
-1050  LOG.warn("Change the 
permission of " + needSetOwnerDir + " to " + filesMode);
-1051  
setPermissionParallel(outputFs, (short)filesMode, travesedPaths, conf);
-1052}
-1053  }
-1054}
-1055
-1056// Write a new .snapshotinfo if the 
target name is different from the source name
-1057if 
(!targetName.equals(snapshotName)) {
-1058  SnapshotDesc

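For orientation, a hedged sketch of driving the exporter programmatically rather than from
the hbase CLI; the snapshot name and target URI are invented, and this assumes
ExportSnapshot's Hadoop Tool interface so that ToolRunner can run it:

  int rc = ToolRunner.run(HBaseConfiguration.create(), new ExportSnapshot(),
      new String[] { "-snapshot", "demo-snapshot",
                     "-copy-to", "hdfs://backup-cluster:8020/hbase",
                     "-overwrite" });  // -overwrite is the option the messages above refer to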
[25/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifest.RegionVisitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifest.RegionVisitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifest.RegionVisitor.html
index 025b084..b9f473a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifest.RegionVisitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotManifest.RegionVisitor.html
@@ -89,504 +89,517 @@
 081  private final ForeignExceptionSnare 
monitor;
 082  private final Configuration conf;
 083  private final Path workingDir;
-084  private final FileSystem fs;
-085  private int manifestSizeLimit;
-086
-087  private SnapshotManifest(final 
Configuration conf, final FileSystem fs,
-088  final Path workingDir, final 
SnapshotDescription desc,
-089  final ForeignExceptionSnare 
monitor) {
-090this.monitor = monitor;
-091this.desc = desc;
-092this.workingDir = workingDir;
-093this.conf = conf;
-094this.fs = fs;
-095
-096this.manifestSizeLimit = 
conf.getInt(SNAPSHOT_MANIFEST_SIZE_LIMIT_CONF_KEY, 64 * 1024 * 1024);
-097  }
-098
-099  /**
-100   * Return a SnapshotManifest instance, 
used for writing a snapshot.
-101   *
-102   * There are two usage pattern:
-103   *  - The Master will create a 
manifest, add the descriptor, offline regions
-104   *and consolidate the snapshot by 
writing all the pending stuff on-disk.
-105   *  manifest = 
SnapshotManifest.create(...)
-106   *  manifest.addRegion(tableDir, 
hri)
-107   *  manifest.consolidate()
-108   *  - The RegionServer will create a 
single region manifest
-109   *  manifest = 
SnapshotManifest.create(...)
-110   *  manifest.addRegion(region)
-111   */
-112  public static SnapshotManifest 
create(final Configuration conf, final FileSystem fs,
-113  final Path workingDir, final 
SnapshotDescription desc,
-114  final ForeignExceptionSnare 
monitor) {
-115return new SnapshotManifest(conf, fs, 
workingDir, desc, monitor);
-116
-117  }
-118
-119  /**
-120   * Return a SnapshotManifest instance 
with the information already loaded in-memory.
-121   *SnapshotManifest manifest = 
SnapshotManifest.open(...)
-122   *TableDescriptor htd = 
manifest.getTableDescriptor()
-123   *for (SnapshotRegionManifest 
regionManifest: manifest.getRegionManifests())
-124   *  hri = 
regionManifest.getRegionInfo()
-125   *  for 
(regionManifest.getFamilyFiles())
-126   *...
-127   */
-128  public static SnapshotManifest 
open(final Configuration conf, final FileSystem fs,
-129  final Path workingDir, final 
SnapshotDescription desc) throws IOException {
-130SnapshotManifest manifest = new 
SnapshotManifest(conf, fs, workingDir, desc, null);
-131manifest.load();
-132return manifest;
-133  }
-134
-135
-136  /**
-137   * Add the table descriptor to the 
snapshot manifest
+084  private final FileSystem rootFs;
+085  private final FileSystem 
workingDirFs;
+086  private int manifestSizeLimit;
+087
+088  /**
+089   *
+090   * @param conf configuration file for 
HBase setup
+091   * @param rootFs root filesystem 
containing HFiles
+092   * @param workingDir file path of where 
the manifest should be located
+093   * @param desc description of snapshot 
being taken
+094   * @param monitor monitor of foreign 
exceptions
+095   * @throws IOException if the working 
directory file system cannot be
+096   * determined from 
the config file
+097   */
+098  private SnapshotManifest(final 
Configuration conf, final FileSystem rootFs,
+099  final Path workingDir, final 
SnapshotDescription desc,
+100  final ForeignExceptionSnare 
monitor) throws IOException {
+101this.monitor = monitor;
+102this.desc = desc;
+103this.workingDir = workingDir;
+104this.conf = conf;
+105this.rootFs = rootFs;
+106this.workingDirFs = 
this.workingDir.getFileSystem(this.conf);
+107this.manifestSizeLimit = 
conf.getInt(SNAPSHOT_MANIFEST_SIZE_LIMIT_CONF_KEY, 64 * 1024 * 1024);
+108  }
+109
+110  /**
+111   * Return a SnapshotManifest instance, 
used for writing a snapshot.
+112   *
+113   * There are two usage pattern:
+114   *  - The Master will create a 
manifest, add the descriptor, offline regions
+115   *and consolidate the snapshot by 
writing all the pending stuff on-disk.
+116   *  manifest = 
SnapshotManifest.create(...)
+117   *  manifest.addRegion(tableDir, 
hri)
+118   *  manifest.consolidate()
+119   *  - The RegionServer will create a 
single region manifest
+120   *  manifest = 
SnapshotManifest.create(...)
+121   *  manifest.addRegion(region)
+122   */
+123  public static SnapshotManifest 
create(final Configuration conf, final FileSystem fs,
+124  final Path w

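The javadoc above spells out the intended call sequence; restated as a compact sketch
(names exactly as in that javadoc, with create() now declaring IOException after this
change):

  SnapshotManifest manifest = SnapshotManifest.create(conf, fs, workingDir, desc, monitor);
  manifest.addRegion(tableDir, hri);   // offline regions are added one at a time
  manifest.consolidate();              // write the pending state to disk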
[38/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
index a6e1a7b..768cf5d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.html
@@ -284,11 +284,11 @@
 276   */
 277  void resetTempDir() throws IOException 
{
 278// cleanup any existing snapshots.
-279Path tmpdir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(rootDir);
-280if 
(master.getMasterFileSystem().getFileSystem().exists(tmpdir)) {
-281  if 
(!master.getMasterFileSystem().getFileSystem().delete(tmpdir, true)) {
-282LOG.warn("Couldn't delete working 
snapshot directory: " + tmpdir);
-283  }
+279Path tmpdir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(rootDir,
+280master.getConfiguration());
+281FileSystem tmpFs = 
tmpdir.getFileSystem(master.getConfiguration());
+282if (!tmpFs.delete(tmpdir, true)) {
+283  LOG.warn("Couldn't delete working 
snapshot directory: " + tmpdir);
 284}
 285  }
 286
@@ -441,8 +441,8 @@
 433   */
 434  private synchronized void 
prepareToTakeSnapshot(SnapshotDescription snapshot)
 435  throws HBaseSnapshotException {
-436FileSystem fs = 
master.getMasterFileSystem().getFileSystem();
-437Path workingDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(snapshot, rootDir);
+436Path workingDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(snapshot, rootDir,
+437master.getConfiguration());
 438TableName snapshotTable =
 439
TableName.valueOf(snapshot.getTable());
 440
@@ -465,15 +465,15 @@
 457}
 458
 459try {
-460  // delete the working directory, 
since we aren't running the snapshot. Likely leftovers
-461  // from a failed attempt.
-462  fs.delete(workingDir, true);
-463
-464  // recreate the working directory 
for the snapshot
-465  if (!fs.mkdirs(workingDir)) {
-466throw new 
SnapshotCreationException(
-467"Couldn't create working 
directory (" + workingDir + ") for snapshot",
-468
ProtobufUtil.createSnapshotDesc(snapshot));
+460  FileSystem workingDirFS = 
workingDir.getFileSystem(master.getConfiguration());
+461  // delete the working directory, 
since we aren't running the snapshot. Likely leftovers
+462  // from a failed attempt.
+463  workingDirFS.delete(workingDir, 
true);
+464
+465  // recreate the working directory 
for the snapshot
+466  if 
(!workingDirFS.mkdirs(workingDir)) {
+467throw new 
SnapshotCreationException("Couldn't create working directory (" + workingDir
+468+ ") for snapshot" , 
ProtobufUtil.createSnapshotDesc(snapshot));
 469  }
 470} catch (HBaseSnapshotException e) 
{
 471  throw e;
@@ -487,718 +487,722 @@
 479  /**
 480   * Take a snapshot of a disabled 
table.
 481   * @param snapshot description of the 
snapshot to take. Modified to be {@link Type#DISABLED}.
-482   * @throws HBaseSnapshotException if 
the snapshot could not be started
-483   */
-484  private synchronized void 
snapshotDisabledTable(SnapshotDescription snapshot)
-485  throws HBaseSnapshotException {
-486// setup the snapshot
-487prepareToTakeSnapshot(snapshot);
-488
-489// set the snapshot to be a disabled 
snapshot, since the client doesn't know about that
-490snapshot = 
snapshot.toBuilder().setType(Type.DISABLED).build();
-491
-492// Take the snapshot of the disabled 
table
-493DisabledTableSnapshotHandler handler 
=
-494new 
DisabledTableSnapshotHandler(snapshot, master, this);
-495snapshotTable(snapshot, handler);
-496  }
-497
-498  /**
-499   * Take a snapshot of an enabled 
table.
-500   * @param snapshot description of the 
snapshot to take.
-501   * @throws HBaseSnapshotException if 
the snapshot could not be started
-502   */
-503  private synchronized void 
snapshotEnabledTable(SnapshotDescription snapshot)
-504  throws HBaseSnapshotException {
-505// setup the snapshot
-506prepareToTakeSnapshot(snapshot);
-507
-508// Take the snapshot of the enabled 
table
-509EnabledTableSnapshotHandler handler 
=
-510new 
EnabledTableSnapshotHandler(snapshot, master, this);
-511snapshotTable(snapshot, handler);
-512  }
-513
-514  /**
-515   * Take a snapshot using the specified 
handler.
-516   * On failure the snapshot temporary 
working directory is removed.
-517   * NOTE: prepareToTakeSnapshot() called 
before this one takes care of the rejecting the
-518   *   snapshot request if the table 
is busy with another snapshot/restore operation.
-519   * @param snapshot the snapsh

[21/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.
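The pattern this hunk repeats is that the working directory may now live on a filesystem
other than the root, so its FileSystem is always derived from the path plus the
configuration; condensed, with names as in the code above:

  Path workingDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(snapshot, rootDir, conf);
  FileSystem workingDirFs = workingDir.getFileSystem(conf); // not the master's root filesystem
  workingDirFs.delete(workingDir, true);  // clear leftovers from a failed attempt
  workingDirFs.mkdirs(workingDir);        // recreate before taking the snapshot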

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/index-all.html
--
diff --git a/testdevapidocs/index-all.html b/testdevapidocs/index-all.html
index d5b615b..491160c 100644
--- a/testdevapidocs/index-all.html
+++ b/testdevapidocs/index-all.html
@@ -891,6 +891,8 @@
  
 admin
 - Variable in class org.apache.hadoop.hbase.client.TestSnapshotMetadata
  
+admin
 - Static variable in class org.apache.hadoop.hbase.client.TestSnapshotTemporaryDirectory
+ 
 admin
 - Static variable in class org.apache.hadoop.hbase.client.TestSnapshotWithAcl
  
 admin
 - Variable in class org.apache.hadoop.hbase.client.TestTableFavoredNodes
@@ -4483,6 +4485,8 @@
  
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.client.TestSnapshotCloneIndependence
  
+CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.client.TestSnapshotDFSTemporaryDirectory
+ 
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.client.TestSnapshotFromAdmin
  
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.client.TestSnapshotFromClient
@@ -4491,6 +4495,10 @@
  
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.client.TestSnapshotMetadata
  
+CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.client.TestSnapshotTemporaryDirectory
+ 
+CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.client.TestSnapshotTemporaryDirectoryWithRegionReplicas
+ 
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.client.TestSnapshotWithAcl
  
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.client.TestSplitOrMergeStatus
@@ -6181,6 +6189,8 @@
  
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.snapshot.TestExportSnapshotNoCluster
  
+CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.snapshot.TestExportSnapshotWithTemporaryDirectory
+ 
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.snapshot.TestFlushSnapshotFromClient
  
 CLASS_RULE
 - Static variable in class org.apache.hadoop.hbase.snapshot.TestMobExportSnapshot
@@ -6789,6 +6799,8 @@
  
 cleanupTest()
 - Static method in class org.apache.hadoop.hbase.client.TestSnapshotMetadata
  
+cleanupTest()
 - Static method in class org.apache.hadoop.hbase.client.TestSnapshotTemporaryDirectory
+ 
 cleanupTest()
 - Static method in class org.apache.hadoop.hbase.master.assignment.TestMasterAbortWhileMergingTable
  
 cleanupTest()
 - Static method in class org.apache.hadoop.hbase.master.assignment.TestMergeTableRegionsProcedure
@@ -7479,6 +7491,8 @@
  
 COLUMN_COUNT
 - Static variable in class org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList
  
+COLUMN_EMPTY
 - Static variable in class org.apache.hadoop.hbase.rest.TestTableScan
+ 
 COLUMN_FAMILY
 - Static variable in class org.apache.hadoop.hbase.client.TestSmallReversedScanner
  
 COLUMN_FAMILY
 - Static variable in class org.apache.hadoop.hbase.filter.TestColumnPaginationFilter
@@ -10784,6 +10798,8 @@
  
 createWALKey(TableName,
 HRegionInfo, MultiVersionConcurrencyControl, NavigableMap) - Method in class 
org.apache.hadoop.hbase.regionserver.wal.AbstractTestWALReplay
  
+createWALProcedureStore(Configuration)
 - Method in class org.apache.hadoop.hbase.procedure2.store.wal.TestWALProcedureStore
+ 
 createWALReaderForPrimary()
 - Method in class org.apache.hadoop.hbase.regionserver.TestHRegionReplayEvents
  
 createWALRootDir()
 - Method in class org.apache.hadoop.hbase.HBaseTestingUtility
@@ -11068,6 +11084,8 @@
  
 data
 - Static variable in class org.apache.hadoop.hbase.client.TestBlockEvictionFromClient
  
+data()
 - Static method in class org.apache.hadoop.hbase.client.TestSnapshotTemporaryDirectory
+ 
 data()
 - Static method in class org.apache.hadoop.hbase.io.asyncfs.TestSaslFanOutOneBlockAsyncDFSOutput
  
 data()
 - Static method in class org.apache.hadoop.hbase.io.hfile.bucket.TestBucketCache
@@ -13514,6 +13532,8 @@
  
 expectedRows2
 - Static variable in class org.apache.hadoop.hbase.rest.TestTableScan
  
+expectedRows3
 - Static variable in class org.apache.hadoop.hbase.rest.TestTableScan
+ 
 expectedSize
 - Variable in class org.apache.hadoop.hbase.quotas.TestFileSystemUtilizationChore.ExpectedRegionSizeSummationAnswer
  
 expectedVisString
 - Static variable in class org.apache.hadoop.hbase.security.visibility.TestVisibilityLabelsReplication
@@ -17620,6 +17640,10 @@
  
 getNumReplicas()
 - Method in class org.apache.hadoop.hbase.client.TestSnapshotFromClientWithRegionReplicas
  
+getNumReplicas()
 - Method in class org.apache.hadoop.hbase.client.TestSnapshotTemporaryDirectory
+ 
+getNumReplicas()
 - Method in class org.apache.hadoop.hbase.client.TestSnapshotTemporaryDirectoryWithRegionReplicas
+ 
 getNumReplicas()
 - Method in class org.apache.hadoop.hbase.master.procedure.TestCloneSnapshotProcedure
  
 getNumReplicas()
 - Method in class org.apache.hadoop.hbase.master.procedure.TestRestoreSnapshotProc

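A note for readers scanning this member index: the many CLASS_RULE entries above all refer to the same declaration pattern shared by HBase test classes. A minimal sketch, using one of the indexed class names (the rule enforces the class-level timeout for the test's size category):

    import org.apache.hadoop.hbase.HBaseClassTestRule;
    import org.junit.ClassRule;

    public class TestSnapshotTemporaryDirectory {
      // Standard HBase test boilerplate: every test class exposes its
      // HBaseClassTestRule through a public static CLASS_RULE field.
      @ClassRule
      public static final HBaseClassTestRule CLASS_RULE =
          HBaseClassTestRule.forClass(TestSnapshotTemporaryDirectory.class);
    }
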
[13/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.AtomicOperation.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.AtomicOperation.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.AtomicOperation.html
index 7ed37ca..907a45d 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.AtomicOperation.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.AtomicOperation.html
@@ -28,713 +28,752 @@
 020import static 
org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
 021import static 
org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
 022import static 
org.junit.Assert.assertEquals;
-023import static 
org.junit.Assert.assertTrue;
-024import static org.junit.Assert.fail;
-025
-026import java.io.IOException;
-027import java.util.ArrayList;
-028import java.util.Arrays;
-029import java.util.List;
-030import java.util.Objects;
-031import java.util.Random;
-032import 
java.util.concurrent.CountDownLatch;
-033import 
java.util.concurrent.atomic.AtomicInteger;
-034import 
java.util.concurrent.atomic.AtomicLong;
-035import 
org.apache.hadoop.conf.Configuration;
-036import org.apache.hadoop.fs.FileSystem;
-037import org.apache.hadoop.fs.Path;
-038import org.apache.hadoop.hbase.Cell;
-039import 
org.apache.hadoop.hbase.CellUtil;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-042import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-043import 
org.apache.hadoop.hbase.HColumnDescriptor;
-044import 
org.apache.hadoop.hbase.HConstants;
-045import 
org.apache.hadoop.hbase.HRegionInfo;
-046import 
org.apache.hadoop.hbase.HTableDescriptor;
-047import 
org.apache.hadoop.hbase.MultithreadedTestUtil;
-048import 
org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
-049import 
org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
-050import 
org.apache.hadoop.hbase.TableName;
-051import 
org.apache.hadoop.hbase.client.Append;
-052import 
org.apache.hadoop.hbase.client.Delete;
-053import 
org.apache.hadoop.hbase.client.Durability;
-054import 
org.apache.hadoop.hbase.client.Get;
-055import 
org.apache.hadoop.hbase.client.Increment;
-056import 
org.apache.hadoop.hbase.client.IsolationLevel;
-057import 
org.apache.hadoop.hbase.client.Mutation;
-058import 
org.apache.hadoop.hbase.client.Put;
-059import 
org.apache.hadoop.hbase.client.RegionInfo;
-060import 
org.apache.hadoop.hbase.client.Result;
-061import 
org.apache.hadoop.hbase.client.RowMutations;
-062import 
org.apache.hadoop.hbase.client.Scan;
-063import 
org.apache.hadoop.hbase.client.TableDescriptor;
-064import 
org.apache.hadoop.hbase.filter.BinaryComparator;
-065import 
org.apache.hadoop.hbase.io.HeapSize;
-066import 
org.apache.hadoop.hbase.io.hfile.BlockCache;
-067import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-068import 
org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
-069import 
org.apache.hadoop.hbase.util.Bytes;
-070import org.apache.hadoop.hbase.wal.WAL;
-071import org.junit.After;
-072import org.junit.Before;
-073import org.junit.ClassRule;
-074import org.junit.Rule;
-075import org.junit.Test;
-076import 
org.junit.experimental.categories.Category;
-077import org.junit.rules.TestName;
-078import org.slf4j.Logger;
-079import org.slf4j.LoggerFactory;
-080
-081/**
-082 * Testing of 
HRegion.incrementColumnValue, HRegion.increment,
-083 * and HRegion.append
-084 */
-085@Category({VerySlowRegionServerTests.class, 
MediumTests.class}) // Starts 100 threads
-086public class TestAtomicOperation {
-087
-088  @ClassRule
-089  public static final HBaseClassTestRule 
CLASS_RULE =
-090  
HBaseClassTestRule.forClass(TestAtomicOperation.class);
-091
-092  private static final Logger LOG = 
LoggerFactory.getLogger(TestAtomicOperation.class);
-093  @Rule public TestName name = new 
TestName();
-094
-095  HRegion region = null;
-096  private HBaseTestingUtility TEST_UTIL = 
HBaseTestingUtility.createLocalHTU();
-097
-098  // Test names
-099  static  byte[] tableName;
-100  static final byte[] qual1 = 
Bytes.toBytes("qual1");
-101  static final byte[] qual2 = 
Bytes.toBytes("qual2");
-102  static final byte[] qual3 = 
Bytes.toBytes("qual3");
-103  static final byte[] value1 = 
Bytes.toBytes("value1");
-104  static final byte[] value2 = 
Bytes.toBytes("value2");
-105  static final byte [] row = 
Bytes.toBytes("rowA");
-106  static final byte [] row2 = 
Bytes.toBytes("rowB");
-107
-108  @Before
-109  public void setup() {
-110tableName = 
Bytes.toBytes(name.getMethodName());
-111  }
-112
-113  @After
-114  public void teardown() throws 
IOException {
-115if (region != null) {
-116  BlockCache bc = 
region.getStores().get(0).getCacheConfig()

[16/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/org/apache/hadoop/hbase/snapshot/TestExportSnapshotWithTemporaryDirectory.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/snapshot/TestExportSnapshotWithTemporaryDirectory.html
 
b/testdevapidocs/org/apache/hadoop/hbase/snapshot/TestExportSnapshotWithTemporaryDirectory.html
new file mode 100644
index 000..5273b00
--- /dev/null
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/snapshot/TestExportSnapshotWithTemporaryDirectory.html
@@ -0,0 +1,391 @@
+TestExportSnapshotWithTemporaryDirectory (Apache HBase 3.0.0-SNAPSHOT Test API)
+org.apache.hadoop.hbase.snapshot
+Class 
TestExportSnapshotWithTemporaryDirectory
+
+
+
+java.lang.Object
+
+
+org.apache.hadoop.hbase.snapshot.TestExportSnapshot
+
+
+org.apache.hadoop.hbase.snapshot.TestExportSnapshotWithTemporaryDirectory
+
+
+
+
+
+
+
+
+
+
+public class TestExportSnapshotWithTemporaryDirectory
+extends TestExportSnapshot
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+
+
+
+Nested classes/interfaces inherited from 
class org.apache.hadoop.hbase.snapshot.TestExportSnapshot
+TestExportSnapshot.RegionPredicate
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields 
+
+Modifier and Type
+Field and Description
+
+
+static HBaseClassTestRule
+CLASS_RULE 
+
+
+protected static String
+TEMP_DIR 
+
+
+
+
+
+
+Fields inherited from class org.apache.hadoop.hbase.snapshot.TestExportSnapshot
+FAMILY,
 tableName,
 TEST_UTIL,
 testName
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors 
+
+Constructor and Description
+
+
+TestExportSnapshotWithTemporaryDirectory() 
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods Static Methods Concrete Methods 
+
+Modifier and Type
+Method and Description
+
+
+static void
+setUpBaseConf(org.apache.hadoop.conf.Configuration conf) 
+
+
+static void
+setUpBeforeClass() 
+
+
+static void
+tearDownAfterClass() 
+
+
+
+
+
+
+Methods inherited from class org.apache.hadoop.hbase.snapshot.TestExportSnapshot
+createTable, getBypassRegionPredicate, setUp, tearDown, testConsecutiveExports, testEmptyExportFileSystemState, testExportFailure, testExportFileSystemState, testExportFileSystemState, testExportFileSystemState, testExportFileSystemStateWithSkipTmp, testExportRetry, testExportWithTargetName, verifySnapshot, verifySnapshot, verifySnapshotDir
+
+
+
+
+
+Methods inherited from class java.lang.Object
+clone, equals, finalize, getClass, hashCode, notify, notifyAll, https://docs.oracle.

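The javadoc page above only lists the members of TestExportSnapshotWithTemporaryDirectory. A hedged sketch of how such a subclass could wire its TEMP_DIR into the snapshot working directory, assuming it reuses the hbase.snapshot.working.dir property introduced elsewhere in this changeset; only the member names come from the page, the bodies are illustrative guesses, not the committed code:

    package org.apache.hadoop.hbase.snapshot;

    import java.nio.file.Paths;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseClassTestRule;
    import org.junit.BeforeClass;
    import org.junit.ClassRule;

    public class TestExportSnapshotWithTemporaryDirectory extends TestExportSnapshot {

      @ClassRule
      public static final HBaseClassTestRule CLASS_RULE =
          HBaseClassTestRule.forClass(TestExportSnapshotWithTemporaryDirectory.class);

      // Assumed: a scratch location on the local filesystem for this sketch.
      protected static String TEMP_DIR =
          Paths.get("").toAbsolutePath().toString() + Path.SEPARATOR + "custom-tmp";

      public static void setUpBaseConf(Configuration conf) {
        TestExportSnapshot.setUpBaseConf(conf);
        // Assumed: route in-progress snapshots through the temporary directory.
        conf.set(SnapshotDescriptionUtils.SNAPSHOT_WORKING_DIR,
            "file://" + new Path(TEMP_DIR, ".tmpdir").toUri());
      }

      @BeforeClass
      public static void setUpBeforeClass() throws Exception {
        setUpBaseConf(TEST_UTIL.getConfiguration());
        TestExportSnapshot.setUpBeforeClass();
      }
    }
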
[11/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.Incrementer.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.Incrementer.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.Incrementer.html
index 7ed37ca..907a45d 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.Incrementer.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.Incrementer.html
@@ -28,713 +28,752 @@

[12/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.CheckAndPutThread.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.CheckAndPutThread.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.CheckAndPutThread.html
index 7ed37ca..907a45d 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.CheckAndPutThread.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.CheckAndPutThread.html
@@ -28,713 +28,752 @@

[37/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
index d2de64a..4ebdb3f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
@@ -27,309 +27,338 @@
 019
 020import java.io.FileNotFoundException;
 021import java.io.IOException;
-022import java.util.HashSet;
-023import java.util.List;
-024import java.util.Set;
-025import 
java.util.concurrent.CancellationException;
-026import 
java.util.concurrent.locks.ReentrantLock;
-027
-028import 
org.apache.hadoop.conf.Configuration;
-029import org.apache.hadoop.fs.FileSystem;
-030import org.apache.hadoop.fs.Path;
-031import 
org.apache.hadoop.hbase.MetaTableAccessor;
-032import 
org.apache.hadoop.hbase.ServerName;
-033import 
org.apache.hadoop.hbase.TableName;
-034import 
org.apache.hadoop.hbase.client.RegionInfo;
-035import 
org.apache.hadoop.hbase.client.TableDescriptor;
-036import 
org.apache.hadoop.hbase.errorhandling.ForeignException;
-037import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
-038import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-039import 
org.apache.hadoop.hbase.executor.EventHandler;
-040import 
org.apache.hadoop.hbase.executor.EventType;
-041import 
org.apache.hadoop.hbase.master.MasterServices;
-042import 
org.apache.hadoop.hbase.master.MetricsSnapshot;
-043import 
org.apache.hadoop.hbase.master.SnapshotSentinel;
-044import 
org.apache.hadoop.hbase.master.locking.LockManager;
-045import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-046import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-047import 
org.apache.hadoop.hbase.procedure2.LockType;
-048import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-049import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-050import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-051import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
-052import 
org.apache.hadoop.hbase.util.FSUtils;
-053import 
org.apache.hadoop.hbase.util.Pair;
-054import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-055import 
org.apache.yetus.audience.InterfaceAudience;
-056import 
org.apache.zookeeper.KeeperException;
-057import org.slf4j.Logger;
-058import org.slf4j.LoggerFactory;
-059import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-060
-061/**
-062 * A handler for taking snapshots from 
the master.
-063 *
-064 * This is not a subclass of 
TableEventHandler because using that would incur an extra hbase:meta scan.
-065 *
-066 * The {@link #snapshotRegions(List)} 
call should get implemented for each snapshot flavor.
-067 */
-068@InterfaceAudience.Private
-069public abstract class TakeSnapshotHandler 
extends EventHandler implements SnapshotSentinel,
-070ForeignExceptionSnare {
-071  private static final Logger LOG = 
LoggerFactory.getLogger(TakeSnapshotHandler.class);
-072
-073  private volatile boolean finished;
-074
-075  // none of these should ever be null
-076  protected final MasterServices 
master;
-077  protected final MetricsSnapshot 
metricsSnapshot = new MetricsSnapshot();
-078  protected final SnapshotDescription 
snapshot;
-079  protected final Configuration conf;
-080  protected final FileSystem fs;
-081  protected final Path rootDir;
-082  private final Path snapshotDir;
-083  protected final Path workingDir;
-084  private final MasterSnapshotVerifier 
verifier;
-085  protected final 
ForeignExceptionDispatcher monitor;
-086  protected final LockManager.MasterLock 
tableLock;
-087  protected final MonitoredTask status;
-088  protected final TableName 
snapshotTable;
-089  protected final SnapshotManifest 
snapshotManifest;
-090  protected final SnapshotManager 
snapshotManager;
-091
-092  protected TableDescriptor htd;
-093
-094  /**
-095   * @param snapshot descriptor of the 
snapshot to take
-096   * @param masterServices master 
services provider
-097   */
-098  public 
TakeSnapshotHandler(SnapshotDescription snapshot, final MasterServices 
masterServices,
-099 final 
SnapshotManager snapshotManager) {
-100super(masterServices, 
EventType.C_M_SNAPSHOT_TABLE);
-101assert snapshot != null : 
"SnapshotDescription must not be nul1";
-102assert masterServices != null : 
"MasterServices must not be nul1";
-103
-104this.master = masterServices;
-105this.snapshot = snapshot;
-106this.snapshotManager = 
snapshotManager;
-107this.snapshotTable = 
TableName.valueOf(snapshot.getTable());
-108this.conf = 
this.master.getConfiguratio

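The quoted javadoc spells out the TakeSnapshotHandler contract: each snapshot flavor supplies its own snapshotRegions(List) implementation (the real flavors are the enabled- and disabled-table handlers). A hedged sketch of a trivial flavor, with the snapshotRegions signature inferred from the Pair/RegionInfo/ServerName/KeeperException imports in this hunk; any further abstract members are omitted:

    package org.apache.hadoop.hbase.master.snapshot;

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.hbase.ServerName;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.master.MasterServices;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
    import org.apache.hadoop.hbase.util.Pair;
    import org.apache.zookeeper.KeeperException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative only; not a flavor that exists in HBase.
    public class LoggingSnapshotHandler extends TakeSnapshotHandler {

      private static final Logger LOG = LoggerFactory.getLogger(LoggingSnapshotHandler.class);

      public LoggingSnapshotHandler(SnapshotDescription snapshot, MasterServices masterServices,
          SnapshotManager snapshotManager) {
        super(snapshot, masterServices, snapshotManager);
      }

      @Override
      protected void snapshotRegions(List<Pair<RegionInfo, ServerName>> regions)
          throws IOException, KeeperException {
        // A real flavor captures region state here (flush, manifest, verify);
        // this sketch only reports what it would do.
        for (Pair<RegionInfo, ServerName> region : regions) {
          LOG.info("would snapshot {} on {}", region.getFirst(), region.getSecond());
        }
      }
    }
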
[27/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter.html
index 7462d5b..63a00a7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter.html
@@ -122,294 +122,339 @@
 114  /** Temporary directory under the 
snapshot directory to store in-progress snapshots */
 115  public static final String 
SNAPSHOT_TMP_DIR_NAME = ".tmp";
 116
-117  /** This tag will be created in 
in-progess snapshots */
-118  public static final String 
SNAPSHOT_IN_PROGRESS = ".inprogress";
-119  // snapshot operation values
-120  /** Default value if no start time is 
specified */
-121  public static final long 
NO_SNAPSHOT_START_TIME_SPECIFIED = 0;
+117  /**
+118   * The configuration property that 
determines the filepath of the snapshot
+119   * base working directory
+120   */
+121  public static final String 
SNAPSHOT_WORKING_DIR = "hbase.snapshot.working.dir";
 122
-123
-124  public static final String 
MASTER_SNAPSHOT_TIMEOUT_MILLIS = "hbase.snapshot.master.timeout.millis";
-125
-126  /** By default, wait 300 seconds for a 
snapshot to complete */
-127  public static final long 
DEFAULT_MAX_WAIT_TIME = 60000 * 5;
+123  /** This tag will be created in 
in-progess snapshots */
+124  public static final String 
SNAPSHOT_IN_PROGRESS = ".inprogress";
+125  // snapshot operation values
+126  /** Default value if no start time is 
specified */
+127  public static final long 
NO_SNAPSHOT_START_TIME_SPECIFIED = 0;
 128
 129
-130  /**
-131   * By default, check to see if the 
snapshot is complete (ms)
-132   * @deprecated Use {@link 
#DEFAULT_MAX_WAIT_TIME} instead.
-133   * */
-134  @Deprecated
-135  public static final int 
SNAPSHOT_TIMEOUT_MILLIS_DEFAULT = 60000 * 5;
-136
-137  /**
-138   * Conf key for # of ms elapsed before 
injecting a snapshot timeout error when waiting for
-139   * completion.
-140   * @deprecated Use {@link 
#MASTER_SNAPSHOT_TIMEOUT_MILLIS} instead.
-141   */
-142  @Deprecated
-143  public static final String 
SNAPSHOT_TIMEOUT_MILLIS_KEY = "hbase.snapshot.master.timeoutMillis";
-144
-145  private SnapshotDescriptionUtils() {
-146// private constructor for utility 
class
-147  }
-148
-149  /**
-150   * @param conf {@link Configuration} 
from which to check for the timeout
-151   * @param type type of snapshot being 
taken
-152   * @param defaultMaxWaitTime Default 
amount of time to wait, if none is in the configuration
-153   * @return the max amount of time the 
master should wait for a snapshot to complete
-154   */
-155  public static long 
getMaxMasterTimeout(Configuration conf, SnapshotDescription.Type type,
-156  long defaultMaxWaitTime) {
-157String confKey;
-158switch (type) {
-159case DISABLED:
-160default:
-161  confKey = 
MASTER_SNAPSHOT_TIMEOUT_MILLIS;
-162}
-163return Math.max(conf.getLong(confKey, 
defaultMaxWaitTime),
-164
conf.getLong(SNAPSHOT_TIMEOUT_MILLIS_KEY, defaultMaxWaitTime));
-165  }
-166
-167  /**
-168   * Get the snapshot root directory. All 
the snapshots are kept under this directory, i.e.
-169   * ${hbase.rootdir}/.snapshot
-170   * @param rootDir hbase root 
directory
-171   * @return the base directory in which 
all snapshots are kept
-172   */
-173  public static Path 
getSnapshotRootDir(final Path rootDir) {
-174return new Path(rootDir, 
HConstants.SNAPSHOT_DIR_NAME);
-175  }
-176
-177  /**
-178   * Get the directory for a specified 
snapshot. This directory is a sub-directory of snapshot root
-179   * directory and all the data files for 
a snapshot are kept under this directory.
-180   * @param snapshot snapshot being 
taken
-181   * @param rootDir hbase root 
directory
-182   * @return the final directory for the 
completed snapshot
-183   */
-184  public static Path 
getCompletedSnapshotDir(final SnapshotDescription snapshot, final Path rootDir) 
{
-185return 
getCompletedSnapshotDir(snapshot.getName(), rootDir);
-186  }
-187
-188  /**
-189   * Get the directory for a completed 
snapshot. This directory is a sub-directory of snapshot root
-190   * directory and all the data files for 
a snapshot are kept under this directory.
-191   * @param snapshotName name of the 
snapshot being taken
-192   * @param rootDir hbase root 
directory
-193   * @return the final directory for the 
completed snapshot
-194   */
-195  public static Path 
getCompletedSnapshotDir(final String snapshotName, final Path ro

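The hunk above introduces the hbase.snapshot.working.dir property (SNAPSHOT_WORKING_DIR) and shows getMaxMasterTimeout taking the larger of the new timeout key and the deprecated one. A small usage sketch; the HDFS path is a made-up example:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
    import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;

    public class SnapshotWorkingDirExample {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Keep in-progress snapshots off the final snapshot root.
        conf.set(SnapshotDescriptionUtils.SNAPSHOT_WORKING_DIR,
            "hdfs://namenode:8020/tmp/hbase-snapshot-working");
        // Effective master-side wait: the max of the new and deprecated keys.
        long timeoutMs = SnapshotDescriptionUtils.getMaxMasterTimeout(conf,
            SnapshotDescription.Type.FLUSH, SnapshotDescriptionUtils.DEFAULT_MAX_WAIT_TIME);
        System.out.println("snapshot master timeout: " + timeoutMs + " ms");
      }
    }
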
[09/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.html
index 7ed37ca..907a45d 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.MockHRegion.html
@@ -28,713 +28,752 @@

[01/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 8ea88a7b6 -> 705d69c41


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.RegionData.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.RegionData.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.RegionData.html
index 110e99c..d115ecb 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.RegionData.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.RegionData.html
@@ -515,7 +515,7 @@
 507this.htd = htd;
 508this.desc = desc;
 509this.tableRegions = 
tableRegions;
-510this.snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir);
+510this.snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
 511new FSTableDescriptors(conf)
 512  
.createTableDescriptorForTableDirectory(snapshotDir, htd, false);
 513  }
@@ -695,7 +695,7 @@
 687.setVersion(version)
 688.build();
 689
-690  Path workingDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir);
+690  Path workingDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
 691  
SnapshotDescriptionUtils.writeSnapshotInfo(desc, workingDir, fs);
 692  return new SnapshotBuilder(conf, 
fs, rootDir, htd, desc, regions);
 693}

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.SnapshotBuilder.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.SnapshotBuilder.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.SnapshotBuilder.html
index 110e99c..d115ecb 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.SnapshotBuilder.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.SnapshotBuilder.html
@@ -515,7 +515,7 @@
 507this.htd = htd;
 508this.desc = desc;
 509this.tableRegions = 
tableRegions;
-510this.snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir);
+510this.snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
 511new FSTableDescriptors(conf)
 512  
.createTableDescriptorForTableDirectory(snapshotDir, htd, false);
 513  }
@@ -695,7 +695,7 @@
 687.setVersion(version)
 688.build();
 689
-690  Path workingDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir);
+690  Path workingDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
 691  
SnapshotDescriptionUtils.writeSnapshotInfo(desc, workingDir, fs);
 692  return new SnapshotBuilder(conf, 
fs, rootDir, htd, desc, regions);
 693}

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.html
index 110e99c..d115ecb 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.SnapshotMock.html
@@ -515,7 +515,7 @@
 507this.htd = htd;
 508this.desc = desc;
 509this.tableRegions = 
tableRegions;
-510this.snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir);
+510this.snapshotDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
 511new FSTableDescriptors(conf)
 512  
.createTableDescriptorForTableDirectory(snapshotDir, htd, false);
 513  }
@@ -695,7 +695,7 @@
 687.setVersion(version)
 688.build();
 689
-690  Path workingDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir);
+690  Path workingDir = 
SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
 691  
SnapshotDescriptionUtils.writeSnapshotInfo(desc, workingDir, fs);
 692  return new SnapshotBuilder(conf, 
fs, rootDir, htd, desc, regions);
 693}

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdeva

[03/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.CustomFilter.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.CustomFilter.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.CustomFilter.html
index 826bcba..5323511 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.CustomFilter.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.CustomFilter.html
@@ -98,580 +98,623 @@
 090  private static final String CFB = 
"b";
 091  private static final String COLUMN_1 = 
CFA + ":1";
 092  private static final String COLUMN_2 = 
CFB + ":2";
-093  private static Client client;
-094  private static int expectedRows1;
-095  private static int expectedRows2;
-096  private static Configuration conf;
-097
-098  private static final 
HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-099  private static final 
HBaseRESTTestingUtility REST_TEST_UTIL =
-100new HBaseRESTTestingUtility();
-101
-102  @BeforeClass
-103  public static void setUpBeforeClass() 
throws Exception {
-104conf = 
TEST_UTIL.getConfiguration();
-105conf.set(Constants.CUSTOM_FILTERS, 
"CustomFilter:" + CustomFilter.class.getName());
-106TEST_UTIL.startMiniCluster();
-107
REST_TEST_UTIL.startServletContainer(conf);
-108client = new Client(new 
Cluster().add("localhost",
-109  
REST_TEST_UTIL.getServletPort()));
-110Admin admin = TEST_UTIL.getAdmin();
-111if (!admin.tableExists(TABLE)) {
-112HTableDescriptor htd = new 
HTableDescriptor(TABLE);
-113htd.addFamily(new 
HColumnDescriptor(CFA));
-114htd.addFamily(new 
HColumnDescriptor(CFB));
-115admin.createTable(htd);
-116expectedRows1 = 
TestScannerResource.insertData(conf, TABLE, COLUMN_1, 1.0);
-117expectedRows2 = 
TestScannerResource.insertData(conf, TABLE, COLUMN_2, 0.5);
-118}
-119  }
-120
-121  @AfterClass
-122  public static void tearDownAfterClass() 
throws Exception {
-123
TEST_UTIL.getAdmin().disableTable(TABLE);
-124
TEST_UTIL.getAdmin().deleteTable(TABLE);
-125
REST_TEST_UTIL.shutdownServletContainer();
-126TEST_UTIL.shutdownMiniCluster();
-127  }
-128
-129  @Test
-130  public void testSimpleScannerXML() 
throws IOException, JAXBException, XMLStreamException {
-131// Test scanning particular columns
-132StringBuilder builder = new 
StringBuilder();
-133builder.append("/*");
-134builder.append("?");
-135builder.append(Constants.SCAN_COLUMN 
+ "=" + COLUMN_1);
-136builder.append("&");
-137builder.append(Constants.SCAN_LIMIT + 
"=10");
-138Response response = client.get("/" + 
TABLE + builder.toString(),
-139  Constants.MIMETYPE_XML);
-140assertEquals(200, 
response.getCode());
-141assertEquals(Constants.MIMETYPE_XML, 
response.getHeader("content-type"));
-142JAXBContext ctx = 
JAXBContext.newInstance(CellSetModel.class);
-143Unmarshaller ush = 
ctx.createUnmarshaller();
-144CellSetModel model = (CellSetModel) 
ush.unmarshal(response.getStream());
-145int count = 
TestScannerResource.countCellSet(model);
-146assertEquals(10, count);
-147checkRowsNotNull(model);
-148
-149//Test with no limit.
-150builder = new StringBuilder();
-151builder.append("/*");
-152builder.append("?");
-153builder.append(Constants.SCAN_COLUMN 
+ "=" + COLUMN_1);
-154response = client.get("/" + TABLE + 
builder.toString(),
-155  Constants.MIMETYPE_XML);
-156assertEquals(200, 
response.getCode());
-157assertEquals(Constants.MIMETYPE_XML, 
response.getHeader("content-type"));
-158model = (CellSetModel) 
ush.unmarshal(response.getStream());
-159count = 
TestScannerResource.countCellSet(model);
-160assertEquals(expectedRows1, count);
-161checkRowsNotNull(model);
-162
-163//Test with start and end row.
-164builder = new StringBuilder();
-165builder.append("/*");
-166builder.append("?");
-167builder.append(Constants.SCAN_COLUMN 
+ "=" + COLUMN_1);
-168builder.append("&");
-169
builder.append(Constants.SCAN_START_ROW + "=aaa");
-170builder.append("&");
-171builder.append(Constants.SCAN_END_ROW 
+ "=aay");
-172response = client.get("/" + TABLE + 
builder.toString(),
-173  Constants.MIMETYPE_XML);
-174assertEquals(200, 
response.getCode());
-175model = (CellSetModel) 
ush.unmarshal(response.getStream());
-176count = 
TestScannerResource.countCellSet(model);
-177RowModel startRow = 
model.getRows().get(0);
-178assertEquals("aaa", 
Bytes.toString(startRow.getKey()));
-179RowModel endRow = 
model.getRows().get(model.getRows().size() - 1);
-180assertEquals("aax", 
Bytes.toString(endRow.getKey()));
-181assertEquals(24, count);
-182checkRowsNotNull(model);
-183
-184//Test with start row and l

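Distilled from the test above: the REST scanner is driven entirely by query parameters on a plain GET. A hedged sketch of the same request pattern outside JUnit; the host, port, and table name are placeholders:

    import org.apache.hadoop.hbase.rest.Constants;
    import org.apache.hadoop.hbase.rest.client.Client;
    import org.apache.hadoop.hbase.rest.client.Cluster;
    import org.apache.hadoop.hbase.rest.client.Response;

    public class RestScanExample {
      public static void main(String[] args) throws Exception {
        Client client = new Client(new Cluster().add("localhost", 8080));
        // Scan column a:1 over rows [aaa, aay), at most 10 rows, as XML.
        String query = "/MyTable/*?" + Constants.SCAN_COLUMN + "=a:1"
            + "&" + Constants.SCAN_START_ROW + "=aaa"
            + "&" + Constants.SCAN_END_ROW + "=aay"
            + "&" + Constants.SCAN_LIMIT + "=10";
        Response response = client.get(query, Constants.MIMETYPE_XML);
        System.out.println("HTTP " + response.getCode());
      }
    }
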
[05/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.Listener.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.Listener.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.Listener.html
index 826bcba..5323511 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.Listener.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.Listener.html
@@ -98,580 +98,623 @@

hbase git commit: HBASE-21172 Reimplement the retry backoff logic for ReopenTableRegionsProcedure

2018-09-12 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 9de388cf3 -> 0476f4e4b


HBASE-21172 Reimplement the retry backoff logic for ReopenTableRegionsProcedure


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0476f4e4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0476f4e4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0476f4e4

Branch: refs/heads/branch-2.0
Commit: 0476f4e4b047b8f64a954bb488fa9a3c92cdcddb
Parents: 9de388c
Author: Duo Zhang 
Authored: Tue Sep 11 17:09:27 2018 +0800
Committer: Duo Zhang 
Committed: Wed Sep 12 16:02:17 2018 +0800

--
 .../hadoop/hbase/procedure2/Procedure.java  |  11 +-
 .../hadoop/hbase/procedure2/ProcedureUtil.java  |  57 +
 .../hbase/procedure2/TestProcedureUtil.java |  15 ++-
 .../assignment/RegionTransitionProcedure.java   |   9 +-
 .../procedure/ReopenTableRegionsProcedure.java  |  31 +++--
 .../TestReopenTableRegionsProcedureBackoff.java | 118 +++
 6 files changed, 202 insertions(+), 39 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0476f4e4/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index b2685f6..a832c78 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -769,14 +769,21 @@ public abstract class Procedure<TEnvironment> implements Comparable<Procedure<TEnvironment>>
+   * Another usage for this method is to implement retrying. A procedure can set the state to
+   * {@code WAITING_TIMEOUT} by calling the {@code setState} method, and throw a
+   * {@link ProcedureSuspendedException} to halt its execution. Do not forget to call the
+   * {@link #setTimeout(int)} method to set the timeout. You should also override this method
+   * to wake up the procedure, and return false to tell the ProcedureExecutor that the
+   * timeout event has been handled.
* @return true to let the framework handle the timeout as abort, false in 
case the procedure
* handled the timeout itself.
*/
   protected synchronized boolean setTimeoutFailure(TEnvironment env) {
 if (state == ProcedureState.WAITING_TIMEOUT) {
   long timeDiff = EnvironmentEdgeManager.currentTime() - lastUpdate;
-  setFailure("ProcedureExecutor", new TimeoutIOException(
-"Operation timed out after " + StringUtils.humanTimeDiff(timeDiff)));
+  setFailure("ProcedureExecutor",
+new TimeoutIOException("Operation timed out after " + 
StringUtils.humanTimeDiff(timeDiff)));
   return true;
 }
 return false;

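The new javadoc above describes the WAITING_TIMEOUT retry idiom that this change applies to ReopenTableRegionsProcedure. A minimal sketch of the idiom, not the committed procedure: the Void environment, the backoff numbers, and the fake work are assumptions, and a real procedure would also re-queue itself through its environment's scheduler when it wakes up:

    import java.io.IOException;
    import org.apache.hadoop.hbase.procedure2.Procedure;
    import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
    import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
    import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;

    public class RetryingProcedure extends Procedure<Void> {
      private int attempts = 0;

      @Override
      protected Procedure<Void>[] execute(Void env)
          throws ProcedureYieldException, ProcedureSuspendedException, InterruptedException {
        if (!tryWork()) {
          // Exponential backoff: 1s, 2s, 4s, ... capped at 64s (made-up numbers).
          long backoffMs = 1000L << Math.min(attempts++, 6);
          setTimeout(Math.toIntExact(backoffMs));
          setState(ProcedureState.WAITING_TIMEOUT);
          // Halt execution; the executor fires the timeout event later.
          throw new ProcedureSuspendedException();
        }
        return null; // done, no child procedures
      }

      @Override
      protected synchronized boolean setTimeoutFailure(Void env) {
        // Wake up and retry instead of aborting; return false to report the
        // timeout event as handled.
        setState(ProcedureState.RUNNABLE);
        return false;
      }

      private boolean tryWork() {
        return attempts >= 3; // pretend the work succeeds once three retries have elapsed
      }

      @Override
      protected void rollback(Void env) {
        // nothing to roll back in this sketch
      }

      @Override
      protected boolean abort(Void env) {
        return false; // not abortable
      }

      @Override
      protected void serializeStateData(ProcedureStateSerializer serializer) throws IOException {
        // no state to persist in this sketch
      }

      @Override
      protected void deserializeStateData(ProcedureStateSerializer serializer) throws IOException {
        // no state to restore in this sketch
      }
    }
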
http://git-wip-us.apache.org/repos/asf/hbase/blob/0476f4e4/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
index 8a438d4..8c8746e 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
@@ -44,40 +44,42 @@ public final class ProcedureUtil {
   // ==
   //  Reflection helpers to create/validate a Procedure object
   // ==
-  public static Procedure newProcedure(final String className) throws 
BadProcedureException {
+  private static Procedure newProcedure(String className) throws 
BadProcedureException {
 try {
-  final Class<?> clazz = Class.forName(className);
+  Class<?> clazz = Class.forName(className);
   if (!Modifier.isPublic(clazz.getModifiers())) {
 throw new Exception("the " + clazz + " class is not public");
   }
 
-  final Constructor ctor = clazz.getConstructor();
+  @SuppressWarnings("rawtypes")
+  Constructor<? extends Procedure> ctor = clazz.asSubclass(Procedure.class).getConstructor();
   assert ctor != null : "no constructor found";
   if (!Modifier.isPublic(ctor.getModifiers())) {
 throw new Exception("the " + clazz + " constructor is not public");
   }
-  return (Procedure)ctor.newInstance();
+  return ctor.newInstance();
 } catch (Exception e) {
-  throw new BadProcedureException("The procedure class " + className +
-  " must be accessible and have an

hbase git commit: HBASE-21172 Reimplement the retry backoff logic for ReopenTableRegionsProcedure

2018-09-12 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 ea4194039 -> 2da6dbe56


HBASE-21172 Reimplement the retry backoff logic for ReopenTableRegionsProcedure


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2da6dbe5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2da6dbe5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2da6dbe5

Branch: refs/heads/branch-2.1
Commit: 2da6dbe563fd67c34f2a489f1f742d31b00fa159
Parents: ea41940
Author: Duo Zhang 
Authored: Tue Sep 11 17:09:27 2018 +0800
Committer: Duo Zhang 
Committed: Wed Sep 12 16:01:55 2018 +0800

--
 .../hadoop/hbase/procedure2/Procedure.java  |  11 +-
 .../hadoop/hbase/procedure2/ProcedureUtil.java  |  57 +
 .../hbase/procedure2/TestProcedureUtil.java |  15 ++-
 .../assignment/RegionTransitionProcedure.java   |   9 +-
 .../procedure/ReopenTableRegionsProcedure.java  |  31 +++--
 .../TestReopenTableRegionsProcedureBackoff.java | 118 +++
 6 files changed, 202 insertions(+), 39 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2da6dbe5/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index b2685f6..a832c78 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -769,14 +769,21 @@ public abstract class Procedure<TEnvironment> implements Comparable<Procedure<TEnvironment>>
+   * Another usage for this method is to implement retrying. A procedure can set the state to
+   * {@code WAITING_TIMEOUT} by calling the {@code setState} method, and throw a
+   * {@link ProcedureSuspendedException} to halt its execution. Do not forget to call the
+   * {@link #setTimeout(int)} method to set the timeout. You should also override this method
+   * to wake up the procedure, and return false to tell the ProcedureExecutor that the
+   * timeout event has been handled.
* @return true to let the framework handle the timeout as abort, false in 
case the procedure
* handled the timeout itself.
*/
   protected synchronized boolean setTimeoutFailure(TEnvironment env) {
 if (state == ProcedureState.WAITING_TIMEOUT) {
   long timeDiff = EnvironmentEdgeManager.currentTime() - lastUpdate;
-  setFailure("ProcedureExecutor", new TimeoutIOException(
-"Operation timed out after " + StringUtils.humanTimeDiff(timeDiff)));
+  setFailure("ProcedureExecutor",
+new TimeoutIOException("Operation timed out after " + 
StringUtils.humanTimeDiff(timeDiff)));
   return true;
 }
 return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/2da6dbe5/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
index 8a438d4..8c8746e 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
@@ -44,40 +44,42 @@ public final class ProcedureUtil {
   // ==
   //  Reflection helpers to create/validate a Procedure object
   // ==
-  public static Procedure newProcedure(final String className) throws 
BadProcedureException {
+  private static Procedure newProcedure(String className) throws 
BadProcedureException {
 try {
-  final Class<?> clazz = Class.forName(className);
+  Class<?> clazz = Class.forName(className);
   if (!Modifier.isPublic(clazz.getModifiers())) {
 throw new Exception("the " + clazz + " class is not public");
   }
 
-  final Constructor ctor = clazz.getConstructor();
+  @SuppressWarnings("rawtypes")
+  Constructor<? extends Procedure> ctor = clazz.asSubclass(Procedure.class).getConstructor();
   assert ctor != null : "no constructor found";
   if (!Modifier.isPublic(ctor.getModifiers())) {
 throw new Exception("the " + clazz + " constructor is not public");
   }
-  return (Procedure)ctor.newInstance();
+  return ctor.newInstance();
 } catch (Exception e) {
-  throw new BadProcedureException("The procedure class " + className +
-  " must be accessible and have an