hbase git commit: HBASE-20695 Implement table level RegionServer replication metrics

2018-06-14 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/branch-2 a4cb98b1c -> b68746c0b


HBASE-20695 Implement table level RegionServer replication metrics

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b68746c0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b68746c0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b68746c0

Branch: refs/heads/branch-2
Commit: b68746c0b273f8ffd2924807f9a82be9f91f28ac
Parents: a4cb98b
Author: Xu Cang 
Authored: Wed Jun 13 00:06:05 2018 -0700
Committer: Guanghao Zhang 
Committed: Fri Jun 15 10:45:13 2018 +0800

--
 .../replication/regionserver/MetricsSource.java | 27 ++--
 .../regionserver/ReplicationSourceShipper.java  |  9 +--
 .../replication/TestReplicationEndpoint.java| 22 +++-
 3 files changed, 53 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b68746c0/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
index a59dd72..7bc7084 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
@@ -29,6 +29,8 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 
+import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+
 /**
 * This class is for maintaining the various replication statistics for a source and publishing them
  * through the metrics interfaces.
@@ -45,7 +47,7 @@ public class MetricsSource implements BaseSource {
 
   private final MetricsReplicationSourceSource singleSourceSource;
   private final MetricsReplicationSourceSource globalSourceSource;
-
+  private Map<String, MetricsReplicationSourceSource> singleSourceSourceByTable;
 
   /**
* Constructor used to register the metrics
@@ -58,6 +60,7 @@ public class MetricsSource implements BaseSource {
 
CompatibilitySingletonFactory.getInstance(MetricsReplicationSourceFactory.class)
 .getSource(id);
 globalSourceSource = 
CompatibilitySingletonFactory.getInstance(MetricsReplicationSourceFactory.class).getGlobalSource();
+singleSourceSourceByTable = new HashMap<>();
   }
 
   /**
@@ -67,10 +70,12 @@ public class MetricsSource implements BaseSource {
* @param globalSourceSource Class to monitor global-scoped metrics
*/
   public MetricsSource(String id, MetricsReplicationSourceSource singleSourceSource,
-      MetricsReplicationSourceSource globalSourceSource) {
+      MetricsReplicationSourceSource globalSourceSource,
+      Map<String, MetricsReplicationSourceSource> singleSourceSourceByTable) {
 this.id = id;
 this.singleSourceSource = singleSourceSource;
 this.globalSourceSource = globalSourceSource;
+this.singleSourceSourceByTable = singleSourceSourceByTable;
   }
 
   /**
@@ -86,6 +91,19 @@ public class MetricsSource implements BaseSource {
   }
 
   /**
+   * Set the age of the last edit that was shipped, grouped by table.
+   * @param timestamp write time of the edit
+   * @param tableName name of the table the edit belongs to
+   */
+  public void setAgeOfLastShippedOpByTable(long timestamp, String tableName) {
+long age = EnvironmentEdgeManager.currentTime() - timestamp;
+this.getSingleSourceSourceByTable().computeIfAbsent(
+tableName, t -> CompatibilitySingletonFactory
+.getInstance(MetricsReplicationSourceFactory.class).getSource(t))
+.setLastShippedAge(age);
+  }
+
+  /**
 * Convenience method to use the last given timestamp to refresh the age of the last edit.
 * Used when replication fails and we need to keep that metric accurate.
* @param walGroupId id of the group to update
@@ -349,4 +367,9 @@ public class MetricsSource implements BaseSource {
   public String getMetricsName() {
 return globalSourceSource.getMetricsName();
   }
+
+  @VisibleForTesting
+  public Map<String, MetricsReplicationSourceSource> getSingleSourceSourceByTable() {
+return singleSourceSourceByTable;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b68746c0/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java
 

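The heart of the MetricsSource change above is a computeIfAbsent lookup keyed by table
name: the first edit shipped for a table lazily registers that table's metrics source.
A minimal standalone sketch of the same pattern in plain Java (AgeGauge and the class
name are illustrative stand-ins, not HBase classes):

    import java.util.HashMap;
    import java.util.Map;

    /** Illustrative per-table metric registry mirroring the patch's computeIfAbsent pattern. */
    public class PerTableAgeSketch {
      /** Stand-in for MetricsReplicationSourceSource: tracks a single age gauge. */
      static final class AgeGauge {
        private volatile long ageMs;
        void set(long ageMs) { this.ageMs = ageMs; }
        long get() { return ageMs; }
      }

      private final Map<String, AgeGauge> byTable = new HashMap<>();

      /** Record the shipping age for one table, creating its gauge on first use. */
      public void setAgeOfLastShippedOp(long editWriteTime, String tableName) {
        long age = System.currentTimeMillis() - editWriteTime;
        byTable.computeIfAbsent(tableName, t -> new AgeGauge()).set(age);
      }
    }
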
hbase git commit: HBASE-20695 Implement table level RegionServer replication metrics

2018-06-14 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/master 04db90077 -> 86653c708


HBASE-20695 Implement table level RegionServer replication metrics

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/86653c70
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/86653c70
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/86653c70

Branch: refs/heads/master
Commit: 86653c708f6ec2bdbeb27baa0444b19957d911cc
Parents: 04db900
Author: Xu Cang 
Authored: Wed Jun 13 00:06:05 2018 -0700
Committer: Guanghao Zhang 
Committed: Fri Jun 15 10:38:49 2018 +0800

--
 .../replication/regionserver/MetricsSource.java | 27 ++--
 .../regionserver/ReplicationSourceShipper.java  |  9 +--
 .../replication/TestReplicationEndpoint.java| 22 +++-
 3 files changed, 53 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/86653c70/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
index a59dd72..7bc7084 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
@@ -29,6 +29,8 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 
+import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+
 /**
 * This class is for maintaining the various replication statistics for a source and publishing them
  * through the metrics interfaces.
@@ -45,7 +47,7 @@ public class MetricsSource implements BaseSource {
 
   private final MetricsReplicationSourceSource singleSourceSource;
   private final MetricsReplicationSourceSource globalSourceSource;
-
+  private Map<String, MetricsReplicationSourceSource> singleSourceSourceByTable;
 
   /**
* Constructor used to register the metrics
@@ -58,6 +60,7 @@ public class MetricsSource implements BaseSource {
 
CompatibilitySingletonFactory.getInstance(MetricsReplicationSourceFactory.class)
 .getSource(id);
 globalSourceSource = 
CompatibilitySingletonFactory.getInstance(MetricsReplicationSourceFactory.class).getGlobalSource();
+singleSourceSourceByTable = new HashMap<>();
   }
 
   /**
@@ -67,10 +70,12 @@ public class MetricsSource implements BaseSource {
* @param globalSourceSource Class to monitor global-scoped metrics
*/
   public MetricsSource(String id, MetricsReplicationSourceSource singleSourceSource,
-      MetricsReplicationSourceSource globalSourceSource) {
+      MetricsReplicationSourceSource globalSourceSource,
+      Map<String, MetricsReplicationSourceSource> singleSourceSourceByTable) {
 this.id = id;
 this.singleSourceSource = singleSourceSource;
 this.globalSourceSource = globalSourceSource;
+this.singleSourceSourceByTable = singleSourceSourceByTable;
   }
 
   /**
@@ -86,6 +91,19 @@ public class MetricsSource implements BaseSource {
   }
 
   /**
+   * Set the age of the last edit that was shipped, grouped by table.
+   * @param timestamp write time of the edit
+   * @param tableName name of the table the edit belongs to
+   */
+  public void setAgeOfLastShippedOpByTable(long timestamp, String tableName) {
+long age = EnvironmentEdgeManager.currentTime() - timestamp;
+this.getSingleSourceSourceByTable().computeIfAbsent(
+tableName, t -> CompatibilitySingletonFactory
+.getInstance(MetricsReplicationSourceFactory.class).getSource(t))
+.setLastShippedAge(age);
+  }
+
+  /**
 * Convenience method to use the last given timestamp to refresh the age of the last edit.
 * Used when replication fails and we need to keep that metric accurate.
* @param walGroupId id of the group to update
@@ -349,4 +367,9 @@ public class MetricsSource implements BaseSource {
   public String getMetricsName() {
 return globalSourceSource.getMetricsName();
   }
+
+  @VisibleForTesting
+  public Map<String, MetricsReplicationSourceSource> getSingleSourceSourceByTable() {
+return singleSourceSourceByTable;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/86653c70/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java
 

[03/12] hbase git commit: HBASE-20722 Make RegionServerTracker only depend on children changed event

2018-06-14 Thread busbey
HBASE-20722 Make RegionServerTracker only depend on children changed event


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/423a0ab7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/423a0ab7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/423a0ab7

Branch: refs/heads/HBASE-20331
Commit: 423a0ab71a4e588f3efd2d2ab96a36c16e8b37b1
Parents: ec66434
Author: zhangduo 
Authored: Wed Jun 13 21:11:23 2018 +0800
Committer: zhangduo 
Committed: Thu Jun 14 08:36:37 2018 +0800

--
 .../hadoop/hbase/client/VersionInfoUtil.java|   2 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |  64 +++---
 .../hbase/master/RegionServerTracker.java   | 225 ++-
 .../hadoop/hbase/master/ServerManager.java  |  17 +-
 .../hbase/master/TestAssignmentListener.java|  98 +---
 .../hbase/master/TestClockSkewDetection.java|   2 +-
 .../hbase/master/TestMasterNoCluster.java   |  18 +-
 .../hbase/master/TestShutdownBackupMaster.java  |   2 +-
 8 files changed, 167 insertions(+), 261 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/423a0ab7/hbase-server/src/main/java/org/apache/hadoop/hbase/client/VersionInfoUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/VersionInfoUtil.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/VersionInfoUtil.java
index 95984de..cde59eb 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/VersionInfoUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/VersionInfoUtil.java
@@ -102,7 +102,7 @@ public final class VersionInfoUtil {
* @param versionInfo the VersionInfo object to pack
* @return the version number as int. (e.g. 0x0103004 is 1.3.4)
*/
-  private static int getVersionNumber(final HBaseProtos.VersionInfo versionInfo) {
+  public static int getVersionNumber(final HBaseProtos.VersionInfo versionInfo) {
 if (versionInfo != null) {
   try {
 final String[] components = getVersionComponents(versionInfo);

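The javadoc example above ("0x0103004 is 1.3.4") is consistent with packing the version
components into bit fields as (major << 20) | (minor << 12) | patch. A small sketch of
that arithmetic (the packing shown is inferred from the example value, not copied from
HBase's implementation):

    /** Illustrative packing that reproduces the "0x0103004 is 1.3.4" example. */
    public final class VersionPackSketch {
      static int pack(int major, int minor, int patch) {
        return (major << 20) | (minor << 12) | patch;
      }

      public static void main(String[] args) {
        // Prints "103004", i.e. 0x0103004 for version 1.3.4.
        System.out.println(Integer.toHexString(pack(1, 3, 4)));
      }
    }
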
http://git-wip-us.apache.org/repos/asf/hbase/blob/423a0ab7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index f20cc61..883bb4f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -72,7 +72,6 @@ import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.PleaseHoldException;
 import org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
 import org.apache.hadoop.hbase.ScheduledChore;
-import org.apache.hadoop.hbase.ServerMetricsBuilder;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableDescriptors;
 import org.apache.hadoop.hbase.TableName;
@@ -87,6 +86,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.client.TableState;
+import org.apache.hadoop.hbase.client.VersionInfoUtil;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.exceptions.MergeRegionException;
@@ -212,7 +212,6 @@ import 
org.apache.hbase.thirdparty.com.google.common.collect.Maps;
 
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
@@ -297,7 +296,7 @@ public class HMaster extends HRegionServer implements 
MasterServices {
   // Manager and zk listener for master election
   private final ActiveMasterManager activeMasterManager;
   // Region server tracker
-  RegionServerTracker regionServerTracker;
+  private RegionServerTracker regionServerTracker;
   // Draining region server tracker
   private DrainingServerTracker drainingServerTracker;
   // Tracker for load balancer state
@@ -725,10 +724,16 @@ public class HMaster extends HRegionServer implements 
MasterServices {
   }
 
   /**
+   * 
* Initialize all ZK based system trackers.
+ 

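The rework above keeps RegionServerTracker driven purely by ZooKeeper's
NodeChildrenChanged events: on each event, re-read the children, diff against the last
snapshot, and re-arm the watch. A rough sketch of that pattern against the plain
ZooKeeper client (class and handler names are illustrative, not HBase's):

    import java.util.HashSet;
    import java.util.Set;
    import org.apache.zookeeper.KeeperException;
    import org.apache.zookeeper.WatchedEvent;
    import org.apache.zookeeper.Watcher;
    import org.apache.zookeeper.ZooKeeper;

    /** Illustrative tracker driven only by children-changed events. */
    public class ChildrenTrackerSketch implements Watcher {
      private final ZooKeeper zk;
      private final String path; // the znode the servers register ephemeral children under
      private Set<String> live = new HashSet<>();

      public ChildrenTrackerSketch(ZooKeeper zk, String path) {
        this.zk = zk;
        this.path = path;
      }

      /** Read the children and re-arm the watch in a single call. */
      public synchronized void refresh() throws KeeperException, InterruptedException {
        Set<String> current = new HashSet<>(zk.getChildren(path, this));
        for (String s : current) {
          if (!live.contains(s)) { /* a server joined */ }
        }
        for (String s : live) {
          if (!current.contains(s)) { /* a server's znode expired */ }
        }
        live = current;
      }

      @Override
      public void process(WatchedEvent event) {
        if (event.getType() == Event.EventType.NodeChildrenChanged) {
          try {
            refresh();
          } catch (KeeperException | InterruptedException e) {
            Thread.currentThread().interrupt(); // a real tracker would retry or abort
          }
        }
      }
    }
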
[10/12] hbase git commit: HBASE-20615 emphasize shaded artifacts in client tarball.

2018-06-14 Thread busbey
HBASE-20615 emphasize shaded artifacts in client tarball.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5c65a9f9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5c65a9f9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5c65a9f9

Branch: refs/heads/HBASE-20331
Commit: 5c65a9f94003000daac6619ef9e84ed8ee3b9487
Parents: b5017ea
Author: Sean Busbey 
Authored: Fri May 18 11:11:42 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:56:47 2018 -0500

--
 bin/hbase   | 226 ---
 bin/hbase-config.sh |  10 +
 hbase-assembly/pom.xml  |  21 +-
 .../src/main/assembly/client-components.xml |  43 +++-
 hbase-assembly/src/main/assembly/client.xml | 131 ++-
 hbase-assembly/src/main/assembly/components.xml |   3 +-
 .../src/main/assembly/hadoop-two-compat.xml |  80 ++-
 pom.xml |   6 +
 8 files changed, 417 insertions(+), 103 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5c65a9f9/bin/hbase
--
diff --git a/bin/hbase b/bin/hbase
index 4f1c854..559a02e 100755
--- a/bin/hbase
+++ b/bin/hbase
@@ -71,11 +71,18 @@ if [ -d "${HBASE_HOME}/target" ]; then
   in_dev_env=true
 fi
 
+# Detect if we are in the omnibus tarball
+in_omnibus_tarball="false"
+if [ -f "${HBASE_HOME}/bin/hbase-daemons.sh" ]; then
+  in_omnibus_tarball="true"
+fi
+
 read -d '' options_string << EOF
 Options:
-  --config DIR     Configuration directory to use. Default: ./conf
-  --hosts HOSTS    Override the list in 'regionservers' file
-  --auth-as-server Authenticate to ZooKeeper using servers configuration
+  --config DIR         Configuration directory to use. Default: ./conf
+  --hosts HOSTS        Override the list in 'regionservers' file
+  --auth-as-server     Authenticate to ZooKeeper using servers configuration
+  --internal-classpath Skip attempting to use client facing jars (WARNING: unstable results between versions)
 EOF
 # if no args specified, show usage
 if [ $# = 0 ]; then
@@ -87,16 +94,18 @@ if [ $# = 0 ]; then
   echo "  shell   Run the HBase shell"
   echo "  hbckRun the hbase 'fsck' tool"
   echo "  snapshotTool for managing snapshots"
-  echo "  wal Write-ahead-log analyzer"
-  echo "  hfile   Store file analyzer"
-  echo "  zkcli   Run the ZooKeeper shell"
-  echo "  master  Run an HBase HMaster node"
-  echo "  regionserverRun an HBase HRegionServer node"
-  echo "  zookeeper   Run a ZooKeeper server"
-  echo "  restRun an HBase REST server"
-  echo "  thrift  Run the HBase Thrift server"
-  echo "  thrift2 Run the HBase Thrift2 server"
-  echo "  clean   Run the HBase clean up script"
+  if [ "${in_omnibus_tarball}" = "true" ]; then
+echo "  wal Write-ahead-log analyzer"
+echo "  hfile   Store file analyzer"
+echo "  zkcli   Run the ZooKeeper shell"
+echo "  master  Run an HBase HMaster node"
+echo "  regionserverRun an HBase HRegionServer node"
+echo "  zookeeper   Run a ZooKeeper server"
+echo "  restRun an HBase REST server"
+echo "  thrift  Run the HBase Thrift server"
+echo "  thrift2 Run the HBase Thrift2 server"
+echo "  clean   Run the HBase clean up script"
+  fi
   echo "  classpath   Dump hbase CLASSPATH"
   echo "  mapredcpDump CLASSPATH entries required by mapreduce"
   echo "  pe  Run PerformanceEvaluation"
@@ -186,9 +195,99 @@ for f in $HBASE_HOME/hbase-jars/hbase*.jar; do
   fi
 done
 
+#If avail, add Hadoop to the CLASSPATH and to the JAVA_LIBRARY_PATH
+# Allow this functionality to be disabled
+if [ "$HBASE_DISABLE_HADOOP_CLASSPATH_LOOKUP" != "true" ] ; then
+  HADOOP_IN_PATH=$(PATH="${HADOOP_HOME:-${HADOOP_PREFIX}}/bin:$PATH" which 
hadoop 2>/dev/null)
+fi
+
 # Add libs to CLASSPATH
-for f in $HBASE_HOME/lib/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
+declare shaded_jar
+
+if [ "${INTERNAL_CLASSPATH}" != "true" ]; then
+  # find our shaded jars
+  declare shaded_client
+  declare shaded_client_byo_hadoop
+  declare shaded_mapreduce
+  for f in "${HBASE_HOME}"/lib/shaded-clients/hbase-shaded-client*.jar; do
+if [[ "${f}" =~ byo-hadoop ]]; then
+  shaded_client_byo_hadoop="${f}"
+else
+  shaded_client="${f}"
+fi
+  done
+  for f in "${HBASE_HOME}"/lib/shaded-clients/hbase-shaded-mapreduce*.jar; do
+shaded_mapreduce="${f}"
+  done
+
+  # If command can use our shaded client, use it
+  declare -a commands_in_client_jar=("classpath" "version")
+  for c in 

[12/12] hbase git commit: WIP tune down build retention while troubleshooting.

2018-06-14 Thread busbey
WIP tune down build retention while troubleshooting.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/acaa4a5f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/acaa4a5f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/acaa4a5f

Branch: refs/heads/HBASE-20331
Commit: acaa4a5f8fe567d8a164a82c1519709404e4498a
Parents: e6f8941
Author: Sean Busbey 
Authored: Wed May 23 14:14:45 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:56:47 2018 -0500

--
 dev-support/Jenkinsfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/acaa4a5f/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 59d3227..64144e9 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -24,7 +24,7 @@ pipeline {
 cron('@daily')
   }
   options {
-buildDiscarder(logRotator(numToKeepStr: '30'))
+buildDiscarder(logRotator(numToKeepStr: '1'))
 timeout (time: 9, unit: 'HOURS')
 timestamps()
 skipDefaultCheckout()



[01/12] hbase git commit: HBASE-19377 Update Java API CC version [Forced Update!]

2018-06-14 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-20331 ca0b54884 -> acaa4a5f8 (forced update)


HBASE-19377 Update Java API CC version

Compatibility checker complaining about hash collisions, newer versions
use longer id strings.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8648af07
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8648af07
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8648af07

Branch: refs/heads/HBASE-20331
Commit: 8648af07d4acc6cd31e0a55b6958aa739c7d0551
Parents: edf60b9
Author: Mike Drob 
Authored: Tue Jun 12 13:23:13 2018 -0500
Committer: Mike Drob 
Committed: Tue Jun 12 14:19:13 2018 -0500

--
 dev-support/checkcompatibility.py | 11 +--
 1 file changed, 9 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8648af07/dev-support/checkcompatibility.py
--
diff --git a/dev-support/checkcompatibility.py 
b/dev-support/checkcompatibility.py
index ea9c229..d132c35 100755
--- a/dev-support/checkcompatibility.py
+++ b/dev-support/checkcompatibility.py
@@ -156,7 +156,7 @@ def checkout_java_acc(force):
 
 logging.info("Downloading Java ACC...")
 
-url = "https://github.com/lvc/japi-compliance-checker/archive/2.1.tar.gz"
+url = "https://github.com/lvc/japi-compliance-checker/archive/2.4.tar.gz"
 scratch_dir = get_scratch_dir()
 path = os.path.join(scratch_dir, os.path.basename(url))
 jacc = urllib2.urlopen(url)
@@ -166,7 +166,7 @@ def checkout_java_acc(force):
 subprocess.check_call(["tar", "xzf", path],
   cwd=scratch_dir)
 
-shutil.move(os.path.join(scratch_dir, "japi-compliance-checker-2.1"),
+shutil.move(os.path.join(scratch_dir, "japi-compliance-checker-2.4"),
 os.path.join(acc_dir))
 
 
@@ -266,6 +266,12 @@ def process_java_acc_output(output):
 return_value[line[:6]] = values
 return return_value
 
+def log_java_acc_version():
+    java_acc_path = os.path.join(
+        get_java_acc_dir(), "japi-compliance-checker.pl")
+
+    args = ["perl", java_acc_path, "-dumpversion"]
+    logging.info("Java ACC version: " + check_output(args))
 
 def run_java_acc(src_name, src_jars, dst_name, dst_jars, annotations, 
skip_annotations, name):
 """ Run the compliance checker to compare 'src' and 'dst'. """
@@ -479,6 +485,7 @@ def main():
 
 # Download deps.
 checkout_java_acc(args.force_download)
+log_java_acc_version()
 
 # Set up the build.
 scratch_dir = get_scratch_dir()



[09/12] hbase git commit: HBASE-20333 Provide a shaded client that allows downstream to provide Hadoop needs.

2018-06-14 Thread busbey
HBASE-20333 Provide a shaded client that allows downstream to provide Hadoop 
needs.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4a1408f9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4a1408f9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4a1408f9

Branch: refs/heads/HBASE-20331
Commit: 4a1408f9f6d4d8649cb1eec424fad26e46a88384
Parents: 2541b3b
Author: Sean Busbey 
Authored: Tue Apr 24 14:51:12 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:56:46 2018 -0500

--
 .../hbase-shaded-check-invariants/pom.xml   |  5 ++
 .../hbase-shaded-client-byo-hadoop/pom.xml  | 70 
 hbase-shaded/hbase-shaded-client/pom.xml| 35 --
 hbase-shaded/hbase-shaded-mapreduce/pom.xml | 30 ++---
 hbase-shaded/pom.xml|  6 ++
 5 files changed, 115 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4a1408f9/hbase-shaded/hbase-shaded-check-invariants/pom.xml
--
diff --git a/hbase-shaded/hbase-shaded-check-invariants/pom.xml 
b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
index 7ba4a41..287a986 100644
--- a/hbase-shaded/hbase-shaded-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
@@ -48,6 +48,11 @@
       <artifactId>hbase-shaded-mapreduce</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-shaded-client-byo-hadoop</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <groupId>com.github.stephenc.findbugs</groupId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/4a1408f9/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
--
diff --git a/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml 
b/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
new file mode 100644
index 0000000..c51a1af
--- /dev/null
+++ b/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+    -->
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <artifactId>hbase-shaded</artifactId>
+        <groupId>org.apache.hbase</groupId>
+        <version>3.0.0-SNAPSHOT</version>
+        <relativePath>..</relativePath>
+    </parent>
+    <artifactId>hbase-shaded-client-byo-hadoop</artifactId>
+    <name>Apache HBase - Shaded - Client</name>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-site-plugin</artifactId>
+                <configuration>
+                    <skip>true</skip>
+                </configuration>
+            </plugin>
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <skipAssembly>true</skipAssembly>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+        </dependency>
+    </dependencies>
+    <profiles>
+        <profile>
+            <id>release</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-shade-plugin</artifactId>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+</project>

http://git-wip-us.apache.org/repos/asf/hbase/blob/4a1408f9/hbase-shaded/hbase-shaded-client/pom.xml
--
diff --git a/hbase-shaded/hbase-shaded-client/pom.xml 
b/hbase-shaded/hbase-shaded-client/pom.xml
index 72a5b60..6152aad 100644
--- a/hbase-shaded/hbase-shaded-client/pom.xml
+++ b/hbase-shaded/hbase-shaded-client/pom.xml
@@ -28,7 +28,7 @@
         <relativePath>..</relativePath>
     </parent>
     <artifactId>hbase-shaded-client</artifactId>
-    <name>Apache HBase - Shaded - Client</name>
+    <name>Apache HBase - Shaded - Client (with Hadoop bundled)</name>
 
@@ -51,6 +51,7 @@
         <dependency>
             <groupId>org.apache.hbase</groupId>
             <artifactId>hbase-client</artifactId>
+            <version>${project.version}</version>
         </dependency>
 
@@ -59,10 +60,34 @@
             <id>release</id>
             <build>
                 <plugins>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-shade-plugin</artifactId>
-                    </plugin>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-shade-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>aggregate-into-a-jar-with-relocated-third-parties</id>
+                                <configuration>
+                                    <artifactSet>
+                                        <excludes>
+                                            <exclude>org.apache.hbase:hbase-resource-bundle</exclude>
+                                            <exclude>org.slf4j:*</exclude>
+                                            <exclude>com.google.code.findbugs:*</exclude>
+                                            <exclude>com.github.stephenc.findbugs:*</exclude>
+                                            <exclude>org.apache.htrace:*</exclude>

[05/12] hbase git commit: HBASE-20625 refactor some WALCellCodec related code

2018-06-14 Thread busbey
HBASE-20625 refactor some WALCellCodec related code

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0b28155d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0b28155d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0b28155d

Branch: refs/heads/HBASE-20331
Commit: 0b28155d274910b4e667b949d51f78809a1eff0b
Parents: 9e9db32
Author: jingyuntian 
Authored: Thu Jun 14 10:25:24 2018 +0800
Committer: Guanghao Zhang 
Committed: Thu Jun 14 19:37:01 2018 +0800

--
 .../hbase/protobuf/ReplicationProtbufUtil.java  | 61 --
 .../wal/AbstractProtobufLogWriter.java  |  3 +
 .../wal/AsyncProtobufLogWriter.java |  1 -
 .../regionserver/wal/CompressionContext.java| 54 ++--
 .../regionserver/wal/ProtobufLogReader.java |  2 +
 .../regionserver/wal/ProtobufLogWriter.java |  1 -
 .../hbase/regionserver/wal/ReaderBase.java  |  3 -
 .../wal/SecureProtobufLogReader.java|  1 +
 .../hbase/regionserver/wal/WALCellCodec.java| 87 ++--
 .../replication/ClusterMarkingEntryFilter.java  |  2 -
 .../java/org/apache/hadoop/hbase/wal/WAL.java   | 11 ---
 .../org/apache/hadoop/hbase/wal/WALKeyImpl.java | 60 +-
 .../wal/FaultyProtobufLogReader.java|  3 -
 13 files changed, 134 insertions(+), 155 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0b28155d/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
index 81dd59e..157ad1b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
@@ -24,29 +24,25 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.UUID;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.PrivateCellUtil;
-import org.apache.hadoop.hbase.wal.WALKeyImpl;
+import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.SizedCellScanner;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.HBaseRpcControllerImpl;
 import org.apache.hadoop.hbase.wal.WALEdit;
-import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
 
+import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+
 @InterfaceAudience.Private
 public class ReplicationProtbufUtil {
   /**
@@ -81,7 +77,7 @@ public class ReplicationProtbufUtil {
* found.
*/
   public static Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner>
-  buildReplicateWALEntryRequest(final Entry[] entries) {
+  buildReplicateWALEntryRequest(final Entry[] entries) throws IOException {
 return buildReplicateWALEntryRequest(entries, null, null, null, null);
   }
 
@@ -97,53 +93,30 @@ public class ReplicationProtbufUtil {
*/
   public static Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner>
       buildReplicateWALEntryRequest(final Entry[] entries, byte[] encodedRegionName,
-      String replicationClusterId, Path sourceBaseNamespaceDir, Path sourceHFileArchiveDir) {
+      String replicationClusterId, Path sourceBaseNamespaceDir, Path sourceHFileArchiveDir)
+      throws IOException {
 // Accumulate all the Cells seen in here.
     List<List<? extends Cell>> allCells = new ArrayList<>(entries.length);
 int size = 0;
-WALProtos.FamilyScope.Builder scopeBuilder = 
WALProtos.FamilyScope.newBuilder();
 AdminProtos.WALEntry.Builder entryBuilder = 
AdminProtos.WALEntry.newBuilder();
 AdminProtos.ReplicateWALEntryRequest.Builder builder =
   AdminProtos.ReplicateWALEntryRequest.newBuilder();
-HBaseProtos.UUID.Builder uuidBuilder = HBaseProtos.UUID.newBuilder();
+
 for (Entry entry: entries) {
   entryBuilder.clear();
-  // TODO: this duplicates a lot in WALKeyImpl#getBuilder
-  WALProtos.WALKey.Builder keyBuilder = 

[04/12] hbase git commit: HBASE-20630 B: Delete command enhancements

2018-06-14 Thread busbey
HBASE-20630 B: Delete command enhancements

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9e9db324
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9e9db324
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9e9db324

Branch: refs/heads/HBASE-20331
Commit: 9e9db3245f2d825837f231c004eac99f83829a95
Parents: 423a0ab
Author: Vladimir Rodionov 
Authored: Wed Jun 6 16:49:15 2018 -0700
Committer: tedyu 
Committed: Wed Jun 13 18:46:48 2018 -0700

--
 .../hadoop/hbase/backup/BackupDriver.java   |  9 ++-
 .../hbase/backup/BackupRestoreConstants.java|  7 +-
 .../hbase/backup/impl/BackupCommands.java   | 85 +---
 .../hadoop/hbase/backup/TestBackupDelete.java   | 58 -
 4 files changed, 146 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9e9db324/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java
--
diff --git 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java
index 8baf2f0..6644d89 100644
--- 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java
+++ 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java
@@ -17,10 +17,14 @@
  */
 package org.apache.hadoop.hbase.backup;
 
+import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BACKUP_LIST_DESC;
 import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
 import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
 import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
 import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP;
+import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP_DESC;
+import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_LIST;
 import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
 import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
 import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
@@ -46,14 +50,13 @@ import org.apache.hadoop.hbase.backup.impl.BackupManager;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
-
 /**
  *
  * Command-line entry point for backup operation
@@ -152,10 +155,12 @@ public class BackupDriver extends AbstractHBaseTool {
 addOptNoArg(OPTION_DEBUG, OPTION_DEBUG_DESC);
 addOptWithArg(OPTION_TABLE, OPTION_TABLE_DESC);
 addOptWithArg(OPTION_BANDWIDTH, OPTION_BANDWIDTH_DESC);
+addOptWithArg(OPTION_LIST, OPTION_BACKUP_LIST_DESC);
 addOptWithArg(OPTION_WORKERS, OPTION_WORKERS_DESC);
 addOptWithArg(OPTION_RECORD_NUMBER, OPTION_RECORD_NUMBER_DESC);
 addOptWithArg(OPTION_SET, OPTION_SET_DESC);
 addOptWithArg(OPTION_PATH, OPTION_PATH_DESC);
+addOptWithArg(OPTION_KEEP, OPTION_KEEP_DESC);
 addOptWithArg(OPTION_YARN_QUEUE_NAME, OPTION_YARN_QUEUE_NAME_DESC);
 
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/9e9db324/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
--
diff --git 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
index eaeef22..16ec3d2 100644
--- 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
+++ 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
@@ -65,8 +65,9 @@ public interface BackupRestoreConstants {
   String OPTION_TABLE_DESC = "Table name. If specified, only backup images,"
   + " which contain this table will be listed.";
 
-  String OPTION_TABLE_LIST = "l";
+  String OPTION_LIST = "l";
   String OPTION_TABLE_LIST_DESC = "Table name list, comma-separated.";
+  String OPTION_BACKUP_LIST_DESC = "Backup ids list, comma-separated.";
 
   String OPTION_BANDWIDTH = "b";
   String OPTION_BANDWIDTH_DESC = "Bandwidth per task 

[06/12] hbase git commit: HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

2018-06-14 Thread busbey
HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/04db9007
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/04db9007
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/04db9007

Branch: refs/heads/HBASE-20331
Commit: 04db900772889d70836dbd733f844782fb7adecd
Parents: 0b28155
Author: Sean Busbey 
Authored: Thu Jun 14 13:00:08 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:24:29 2018 -0500

--
 dev-support/hbase-personality.sh | 51 +++
 1 file changed, 28 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/04db9007/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 2c6e4a8..8004167 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -67,10 +67,12 @@ function personality_globals
 
   # Yetus 0.7.0 enforces limits. Default proclimit is 1000.
   # Up it. See HBASE-19902 for how we arrived at this number.
+  #shellcheck disable=SC2034
  PROCLIMIT=10000
 
   # Set docker container to run with 20g. Default is 4g in yetus.
   # See HBASE-19902 for how we arrived at 20g.
+  #shellcheck disable=SC2034
   DOCKERMEMLIMIT=20g
 }
 
@@ -106,7 +108,7 @@ function personality_modules
   local repostatus=$1
   local testtype=$2
   local extra=""
-  local MODULES=(${CHANGED_MODULES[@]})
+  local MODULES=("${CHANGED_MODULES[@]}")
 
   yetus_info "Personality: ${repostatus} ${testtype}"
 
@@ -130,6 +132,11 @@ function personality_modules
 MODULES=(.)
   fi
 
+  # If the checkstyle configs change, check everything.
+  if [[ "${testtype}" == checkstyle ]] && [[ "${MODULES[*]}" =~ 
hbase-checkstyle ]]; then
+MODULES=(.)
+  fi
+
   if [[ ${testtype} == mvninstall ]]; then
 # shellcheck disable=SC2086
 personality_enqueue_module . ${extra}
@@ -188,16 +195,31 @@ function personality_modules
 function personality_file_tests
 {
   local filename=$1
+  yetus_debug "HBase specific personality_file_tests"
   # If the change is to the refguide, then we don't need any builtin yetus 
tests
   # the refguide test (below) will suffice for coverage.
   if [[ ${filename} =~ src/main/asciidoc ]] ||
  [[ ${filename} =~ src/main/xslt ]]; then
 yetus_debug "Skipping builtin yetus checks for ${filename}. refguide test 
should pick it up."
-  # fallback to checking which tests based on what yetus would do by default
-  elif declare -f "${BUILDTOOL}_builtin_personality_file_tests" >/dev/null; 
then
-"${BUILDTOOL}_builtin_personality_file_tests" "${filename}"
-  elif declare -f builtin_personality_file_tests >/dev/null; then
-builtin_personality_file_tests "${filename}"
+  else
+# If we change our asciidoc, rebuild mvnsite
+if [[ ${BUILDTOOL} = maven ]]; then
+  if [[ ${filename} =~ src/site || ${filename} =~ src/main/asciidoc ]]; 
then
+yetus_debug "tests/mvnsite: ${filename}"
+add_test mvnsite
+  fi
+fi
+# If we change checkstyle configs, run checkstyle
+if [[ ${filename} =~ checkstyle.*\.xml ]]; then
+  yetus_debug "tests/checkstyle: ${filename}"
+  add_test checkstyle
+fi
+# fallback to checking which tests based on what yetus would do by default
+if declare -f "${BUILDTOOL}_builtin_personality_file_tests" >/dev/null; 
then
+  "${BUILDTOOL}_builtin_personality_file_tests" "${filename}"
+elif declare -f builtin_personality_file_tests >/dev/null; then
+  builtin_personality_file_tests "${filename}"
+fi
   fi
 }
 
@@ -648,23 +670,6 @@ function hbaseanti_patchfile
   return 0
 }
 
-
-## @description  hbase custom mvnsite file filter.  See HBASE-15042
-## @audience private
-## @stabilityevolving
-## @paramfilename
-function mvnsite_filefilter
-{
-  local filename=$1
-
-  if [[ ${BUILDTOOL} = maven ]]; then
-if [[ ${filename} =~ src/site || ${filename} =~ src/main/asciidoc ]]; then
-  yetus_debug "tests/mvnsite: ${filename}"
-  add_test mvnsite
-fi
-  fi
-}
-
 ## This is named so that yetus will check us right after running tests.
 ## Essentially, we check for normal failures and then we look for zombies.
 #function hbase_unit_logfilter



[02/12] hbase git commit: HBASE-20561 The way we stop a ReplicationSource may cause the RS down

2018-06-14 Thread busbey
HBASE-20561 The way we stop a ReplicationSource may cause the RS down


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ec664343
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ec664343
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ec664343

Branch: refs/heads/HBASE-20331
Commit: ec66434380aee62289ccf7b173d765bbe7083718
Parents: 8648af0
Author: Guanghao Zhang 
Authored: Tue Jun 12 22:19:39 2018 +0800
Committer: Guanghao Zhang 
Committed: Wed Jun 13 17:58:59 2018 +0800

--
 .../regionserver/ReplicationSource.java | 24 +++--
 .../regionserver/ReplicationSourceManager.java  | 28 +---
 .../hadoop/hbase/zookeeper/ZKWatcher.java   |  4 ++-
 3 files changed, 49 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ec664343/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
index d21d83c..b63712b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
@@ -499,9 +499,29 @@ public class ReplicationSource implements 
ReplicationSourceInterface {
     Collection<ReplicationSourceShipper> workers = workerThreads.values();
 for (ReplicationSourceShipper worker : workers) {
   worker.stopWorker();
-  worker.entryReader.interrupt();
-  worker.interrupt();
+  worker.entryReader.setReaderRunning(false);
 }
+
+for (ReplicationSourceShipper worker : workers) {
+  if (worker.isAlive() || worker.entryReader.isAlive()) {
+try {
+  // Wait worker to stop
+  Thread.sleep(this.sleepForRetries);
+} catch (InterruptedException e) {
+  LOG.info("Interrupted while waiting " + worker.getName() + " to 
stop");
+  Thread.currentThread().interrupt();
+}
+// If worker still is alive after waiting, interrupt it
+if (worker.isAlive()) {
+  worker.interrupt();
+}
+// If entry reader is alive after waiting, interrupt it
+if (worker.entryReader.isAlive()) {
+  worker.entryReader.interrupt();
+}
+  }
+}
+
 if (this.replicationEndpoint != null) {
   this.replicationEndpoint.stop();
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ec664343/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index 9b4a22c..a370867 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.hbase.replication.ReplicationTracker;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -449,6 +450,24 @@ public class ReplicationSourceManager implements 
ReplicationListener {
 void exec() throws ReplicationException;
   }
 
+  /**
+   * Refreshing a replication source terminates the old source first, and then the source
+   * thread will be interrupted. Handle the interruption here instead of aborting the
+   * region server.
+   */
+  private void interruptOrAbortWhenFail(ReplicationQueueOperation op) {
+try {
+  op.exec();
+} catch (ReplicationException e) {
+  if (e.getCause() != null && e.getCause() instanceof 
KeeperException.SystemErrorException
+  && e.getCause().getCause() != null && e.getCause()
+  .getCause() instanceof InterruptedException) {
+throw new RuntimeException(
+"Thread is interrupted, the replication source may be terminated");
+  }
+  server.abort("Failed to operate on replication queue", e);
+}
+  }
+
   private void abortWhenFail(ReplicationQueueOperation op) {
 try {
   op.exec();
@@ -484,8 +503,9 @@ public class ReplicationSourceManager implements 
ReplicationListener {
   public void 

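The shutdown change above is a classic graceful-stop sequence: flip the worker's
running flag, give it a grace period to exit on its own, and interrupt only if it is
still alive afterward. A generic sketch of that sequence (names are illustrative):

    /** Illustrative graceful stop: flag first, interrupt only as a last resort. */
    public final class GracefulStopSketch {
      public static void stop(Thread worker, Runnable askToStop, long gracePeriodMs) {
        askToStop.run(); // e.g. sets the worker's volatile "running" flag to false
        if (worker.isAlive()) {
          try {
            worker.join(gracePeriodMs); // wait for the worker to notice the flag
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
          }
          if (worker.isAlive()) {
            worker.interrupt(); // still running: fall back to interruption
          }
        }
      }
    }
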
[11/12] hbase git commit: HBASE-20334 add a test that verifies basic client and MR integration

2018-06-14 Thread busbey
HBASE-20334 add a test that verifies basic client and MR integration


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e6f89414
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e6f89414
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e6f89414

Branch: refs/heads/HBASE-20331
Commit: e6f89414ffd52e4052f916adc30082b6547aae83
Parents: 5c65a9f
Author: Sean Busbey 
Authored: Tue May 1 14:28:52 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:56:47 2018 -0500

--
 dev-support/Jenkinsfile | 233 +++--
 .../hbase_nightly_pseudo-distributed-test.sh| 516 +++
 dev-support/hbase_nightly_source-artifact.sh|  14 +-
 .../cache-apache-project-artifact.sh| 131 +
 4 files changed, 836 insertions(+), 58 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e6f89414/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 2311e35..59d3227 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -60,54 +60,109 @@ pipeline {
 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a 
lot more meta-information.')
   }
   stages {
-stage ('yetus install') {
+stage ('scm-checkout') {
   steps {
-sh  '''#!/usr/bin/env bash
-set -e
-echo "Ensure we have a copy of Apache Yetus."
-if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
-  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
-  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
-  if [ ! -d "${YETUS_DIR}" ]; then
-echo "New download of Apache Yetus version ${YETUS_RELEASE}."
-rm -rf "${WORKSPACE}/.gpg"
-mkdir -p "${WORKSPACE}/.gpg"
-chmod -R 700 "${WORKSPACE}/.gpg"
-
-echo "install yetus project KEYS"
-curl -L --fail -o "${WORKSPACE}/KEYS_YETUS" 
https://dist.apache.org/repos/dist/release/yetus/KEYS
-gpg --homedir "${WORKSPACE}/.gpg" --import "${WORKSPACE}/KEYS_YETUS"
-
-echo "download yetus release ${YETUS_RELEASE}"
-curl -L --fail -O 
"https://dist.apache.org/repos/dist/release/yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz;
-curl -L --fail -O 
"https://dist.apache.org/repos/dist/release/yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz.asc;
-echo "verifying yetus release"
-gpg --homedir "${WORKSPACE}/.gpg" --verify 
"yetus-${YETUS_RELEASE}-bin.tar.gz.asc"
-mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
-  else
-echo "Reusing cached download of Apache Yetus version ${YETUS_RELEASE}."
-  fi
-else
-  YETUS_DIR="${WORKSPACE}/yetus-git"
-  rm -rf "${YETUS_DIR}"
-  echo "downloading from github"
-  curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o 
yetus.tar.gz
-fi
-if [ ! -d "${YETUS_DIR}" ]; then
-  echo "unpacking yetus into '${YETUS_DIR}'"
-  mkdir -p "${YETUS_DIR}"
-  gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
-fi
-'''
-// Set up the file we need at PERSONALITY_FILE location
-dir ("tools") {
-  sh """#!/usr/bin/env bash
-set -e
-echo "Downloading Project personality."
-curl -L  -o personality.sh "${env.PROJECT_PERSONALITY}"
-  """
+dir('component') {
+  checkout scm
+}
+  }
+}
+stage ('thirdparty installs') {
+  parallel {
+stage ('yetus install') {
+  steps {
+// directory must be unique for each parallel stage, because 
jenkins runs them in the same workspace :(
+dir('downloads-yetus') {
+  // can't just do a simple echo or the directory won't be 
created. :(
+  sh '''#!/usr/bin/env bash
+echo "Make sure we have a directory for downloading 
dependencies: $(pwd)"
+'''
+}
+sh  '''#!/usr/bin/env bash
+  set -e
+  echo "Ensure we have a copy of Apache Yetus."
+  if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+if [ ! -d "${YETUS_DIR}" ]; then
+  
"${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh"
 \
+  --working-dir "${WORKSPACE}/downloads-yetus" \
+  --keys 'https://www.apache.org/dist/yetus/KEYS' \
+  "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
+  
"yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz"
+  mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
+else
+  echo "Reusing cached install of Apache 

[08/12] hbase git commit: HBASE-19735 Create a client-tarball assembly

2018-06-14 Thread busbey
HBASE-19735 Create a client-tarball assembly

Provides an extra client descriptor to build a second
tarball with a reduced set of dependencies. Not of great
impact now, but will build the way for better in the future.

Signed-off-by: Sean Busbey 

 Conflicts:
hbase-assembly/pom.xml


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b5017ead
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b5017ead
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b5017ead

Branch: refs/heads/HBASE-20331
Commit: b5017ead7472b621841bf54ec99a516ecf487af5
Parents: 4a1408f
Author: Josh Elser 
Authored: Wed Feb 7 18:37:39 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:56:46 2018 -0500

--
 hbase-assembly/pom.xml  |  33 +++--
 .../src/main/assembly/client-components.xml |  92 +
 hbase-assembly/src/main/assembly/client.xml | 137 +++
 hbase-procedure/pom.xml |   4 +-
 hbase-spark/pom.xml |   6 +
 pom.xml |  10 ++
 6 files changed, 268 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b5017ead/hbase-assembly/pom.xml
--
diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml
index 5da105b..4fa859a 100644
--- a/hbase-assembly/pom.xml
+++ b/hbase-assembly/pom.xml
@@ -95,6 +95,7 @@
           <tarLongFileMode>gnu</tarLongFileMode>
           <descriptors>
             <descriptor>${assembly.file}</descriptor>
+            <descriptor>src/main/assembly/client.xml</descriptor>
           </descriptors>
         </configuration>
         <executions>
@@ -220,22 +221,22 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-        <groupId>org.apache.hbase</groupId>
-        <artifactId>hbase-hadoop-compat</artifactId>
-        <exclusions>
-          <exclusion>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-          </exclusion>
-        </exclusions>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
-        <groupId>org.apache.hbase</groupId>
-        <artifactId>${compat.module}</artifactId>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>${compat.module}</artifactId>
     </dependency>
     <dependency>
-       <groupId>org.apache.hbase</groupId>
-       <artifactId>hbase-shell</artifactId>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-shell</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
@@ -315,6 +316,14 @@
       <groupId>jline</groupId>
       <artifactId>jline</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-shaded-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-shaded-mapreduce</artifactId>
+    </dependency>
   </dependencies>
   <profiles>

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5017ead/hbase-assembly/src/main/assembly/client-components.xml
--
diff --git a/hbase-assembly/src/main/assembly/client-components.xml 
b/hbase-assembly/src/main/assembly/client-components.xml
new file mode 100644
index 0000000..2fd1b57
--- /dev/null
+++ b/hbase-assembly/src/main/assembly/client-components.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0"?>
+<component xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/component/1.1.2"
+           xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+           xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/component/1.1.2 http://maven.apache.org/xsd/component-1.1.2.xsd">
+  <fileSets>
+    <!-- Copy over the documentation if it has been built -->
+    <fileSet>
+      <directory>${project.basedir}/../target/site</directory>
+      <outputDirectory>docs</outputDirectory>
+    </fileSet>
+    <!-- Include top level text files -->
+    <fileSet>
+      <directory>${project.basedir}/..</directory>
+      <outputDirectory>.</outputDirectory>
+      <includes>
+        <include>CHANGES.txt</include>
+        <include>README.txt</include>
+      </includes>
+      <fileMode>0644</fileMode>
+    </fileSet>
+    <!-- Include the conf directory -->
+    <fileSet>
+      <directory>${project.basedir}/../conf</directory>
+      <outputDirectory>conf</outputDirectory>
+      <fileMode>0644</fileMode>
+      <directoryMode>0755</directoryMode>
+    </fileSet>
+    <!-- Client-facing scripts -->
+    <fileSet>
+      <directory>${project.basedir}/../bin</directory>
+      <outputDirectory>bin</outputDirectory>
+      <includes>
+        <include>get-active-master.rb</include>
+        <include>hbase</include>
+        <include>hbase-common.sh</include>
+        <include>hbase-config.sh</include>
+        <include>hbase-jruby</include>
+        <include>hirb.rb</include>
+      </includes>
+      <fileMode>0755</fileMode>
+      <directoryMode>0755</directoryMode>
+    </fileSet>
+    <!-- Windows scripts -->
+    <fileSet>
+      <directory>${project.basedir}/../bin</directory>
+      <outputDirectory>bin</outputDirectory>
+      <includes>
+        <include>hbase.cmd</include>
+        <include>hbase-config.cmd</include>
+      </includes>
+    </fileSet>
+    <!-- Include the shell ruby code -->
+    <fileSet>
+      <directory>${project.basedir}/../hbase-shell/src/main/ruby</directory>
+      <outputDirectory>lib/ruby</outputDirectory>
+      <fileMode>0644</fileMode>
+      <directoryMode>0755</directoryMode>
+    </fileSet>
+    <!-- Include native libraries -->
+    <fileSet>
+      <directory>${project.basedir}/../hbase-server/target/native</directory>
+      <outputDirectory>lib/native</outputDirectory>
+      <fileMode>0755</fileMode>
+      <directoryMode>0755</directoryMode>
+      <includes>
+        <include>*.so</include>
+        <include>*.dylib</include>
+      </includes>
+    </fileSet>
+  </fileSets>
+</component>

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5017ead/hbase-assembly/src/main/assembly/client.xml
--
diff --git a/hbase-assembly/src/main/assembly/client.xml 
b/hbase-assembly/src/main/assembly/client.xml
new file mode 100644
index 0000000..7951961
--- /dev/null
+++ b/hbase-assembly/src/main/assembly/client.xml
@@ -0,0 +1,137 @@
+<?xml version="1.0"?>
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1 http://maven.apache.org/xsd/assembly-1.1.1.xsd">
+  <id>client-bin</id>
+  <formats>
+    <format>tar.gz</format>
+  </formats>
+  <includeBaseDirectory>true</includeBaseDirectory>
+  <baseDirectory>hbase-${project.version}-client</baseDirectory>
+  <componentDescriptors>
+    <componentDescriptor>src/main/assembly/client-components.xml</componentDescriptor>
+  </componentDescriptors>
+  <moduleSets>
+    <moduleSet>
+      <useAllReactorProjects>true</useAllReactorProjects>
+      <includes>
+        <include>org.apache.hbase:hbase-annotations</include>
+        <include>org.apache.hbase:hbase-client</include>
+        <include>org.apache.hbase:hbase-common</include>
+        <include>org.apache.hbase:hbase-hadoop-compat</include>

[07/12] hbase git commit: HBASE-20332 shaded mapreduce module shouldn't include hadoop

2018-06-14 Thread busbey
HBASE-20332 shaded mapreduce module shouldn't include hadoop

* modify the jar checking script to take args; make hadoop stuff optional
* separate out checking the artifacts that have hadoop vs those that don't.
* * Unfortunately means we need two modules for checking things
* * put in a safety check that the support script for checking jar contents is 
maintained in both modules
* * have to carve out an exception for o.a.hadoop.metrics2. :(
* fix duplicated class warning
* clean up dependencies in hbase-server and some modules that depend on it.
* allow Hadoop to have its own htrace where it needs it
* add a precommit check to make sure we're not using old htrace imports


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2541b3bb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2541b3bb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2541b3bb

Branch: refs/heads/HBASE-20331
Commit: 2541b3bbe8c5e0884575d748266035eb708adbe8
Parents: 04db900
Author: Sean Busbey 
Authored: Mon Apr 9 13:37:44 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:56:46 2018 -0500

--
 hbase-backup/pom.xml|  12 +-
 .../src/main/resources/hbase/checkstyle.xml |   4 +-
 hbase-client/pom.xml|   4 -
 hbase-common/pom.xml|  12 --
 hbase-endpoint/pom.xml  |  14 --
 hbase-examples/pom.xml  |  12 --
 hbase-external-blockcache/pom.xml   |   4 -
 hbase-hadoop2-compat/pom.xml|   6 -
 hbase-it/pom.xml|   6 -
 hbase-mapreduce/pom.xml |  30 +--
 hbase-replication/pom.xml   |   4 -
 hbase-rest/pom.xml  |  19 +-
 hbase-rsgroup/pom.xml   |   4 -
 hbase-server/pom.xml| 103 -
 .../hbase-shaded-check-invariants/pom.xml   |  54 +++--
 .../ensure-jars-have-correct-contents.sh|  92 ++--
 hbase-shaded/hbase-shaded-mapreduce/pom.xml | 190 +++-
 .../pom.xml | 215 +++
 .../ensure-jars-have-correct-contents.sh| 129 +++
 hbase-shaded/pom.xml|  13 ++
 hbase-shell/pom.xml |  14 --
 hbase-testing-util/pom.xml  |  16 --
 hbase-thrift/pom.xml|  16 --
 pom.xml |  65 +++---
 24 files changed, 773 insertions(+), 265 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2541b3bb/hbase-backup/pom.xml
--
diff --git a/hbase-backup/pom.xml b/hbase-backup/pom.xml
index 7afd51e..00a996f 100644
--- a/hbase-backup/pom.xml
+++ b/hbase-backup/pom.xml
@@ -155,10 +155,6 @@
       <artifactId>hadoop-common</artifactId>
       <exclusions>
         <exclusion>
-          <groupId>org.apache.htrace</groupId>
-          <artifactId>htrace-core</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>net.java.dev.jets3t</groupId>
           <artifactId>jets3t</artifactId>
         </exclusion>
@@ -264,9 +260,6 @@
           <value>3.0</value>
         </property>
       </activation>
-      <properties>
-        <hadoop.version>3.0-SNAPSHOT</hadoop.version>
-      </properties>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
@@ -276,6 +269,11 @@
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-core</artifactId>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-distcp</artifactId>
+          <version>${hadoop.version}</version>
+        </dependency>
       </dependencies>
     </profile>
   </profiles>

http://git-wip-us.apache.org/repos/asf/hbase/blob/2541b3bb/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
--
diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml 
b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
index 7ad797c..148e256 100644
--- a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
+++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
@@ -86,8 +86,10 @@
   org.apache.commons.lang,
   org.apache.curator.shaded,
   org.apache.hadoop.classification,
+  org.apache.htrace,
   org.apache.htrace.shaded,
-  org.codehaus.jackson"/>
+  org.codehaus.jackson,
+  org.htrace"/>
   

http://git-wip-us.apache.org/repos/asf/hbase/blob/2541b3bb/hbase-client/pom.xml
--
diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index f6247e3..bb99eec 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -235,10 +235,6 @@
       <artifactId>hadoop-common</artifactId>
       <exclusions>
         <exclusion>
-          <groupId>org.apache.htrace</groupId>
-          <artifactId>htrace-core</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>net.java.dev.jets3t</groupId>
           <artifactId>jets3t</artifactId>
         </exclusion>

[hbase] Git Push Summary

2018-06-14 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-20733 [deleted] 73366c862


[3/6] hbase git commit: HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

2018-06-14 Thread busbey
HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/43b51a36
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/43b51a36
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/43b51a36

Branch: refs/heads/branch-1
Commit: 43b51a36dd72153866465630bda06f6cf68a78eb
Parents: b862641
Author: Sean Busbey 
Authored: Thu Jun 14 13:00:08 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:26:59 2018 -0500

--
 dev-support/hbase-personality.sh | 51 +++
 1 file changed, 28 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/43b51a36/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 9828564..0b19690 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -67,10 +67,12 @@ function personality_globals
 
   # Yetus 0.7.0 enforces limits. Default proclimit is 1000.
   # Up it. See HBASE-19902 for how we arrived at this number.
+  #shellcheck disable=SC2034
   PROCLIMIT=10000
 
   # Set docker container to run with 20g. Default is 4g in yetus.
   # See HBASE-19902 for how we arrived at 20g.
+  #shellcheck disable=SC2034
   DOCKERMEMLIMIT=20g
 }
 
@@ -106,7 +108,7 @@ function personality_modules
   local repostatus=$1
   local testtype=$2
   local extra=""
-  local MODULES=(${CHANGED_MODULES[@]})
+  local MODULES=("${CHANGED_MODULES[@]}")
 
   yetus_info "Personality: ${repostatus} ${testtype}"
 
@@ -129,6 +131,11 @@ function personality_modules
 MODULES=(.)
   fi
 
+  # If the checkstyle configs change, check everything.
+  if [[ "${testtype}" == checkstyle ]] && [[ "${MODULES[*]}" =~ 
hbase-checkstyle ]]; then
+MODULES=(.)
+  fi
+
   if [[ ${testtype} == mvninstall ]]; then
 # shellcheck disable=SC2086
 personality_enqueue_module . ${extra}
@@ -187,16 +194,31 @@ function personality_modules
 function personality_file_tests
 {
   local filename=$1
+  yetus_debug "HBase specific personality_file_tests"
   # If the change is to the refguide, then we don't need any builtin yetus tests
   # the refguide test (below) will suffice for coverage.
   if [[ ${filename} =~ src/main/asciidoc ]] ||
      [[ ${filename} =~ src/main/xslt ]]; then
     yetus_debug "Skipping builtin yetus checks for ${filename}. refguide test should pick it up."
-  # fallback to checking which tests based on what yetus would do by default
-  elif declare -f "${BUILDTOOL}_builtin_personality_file_tests" >/dev/null; then
-    "${BUILDTOOL}_builtin_personality_file_tests" "${filename}"
-  elif declare -f builtin_personality_file_tests >/dev/null; then
-    builtin_personality_file_tests "${filename}"
+  else
+    # If we change our asciidoc, rebuild mvnsite
+    if [[ ${BUILDTOOL} = maven ]]; then
+      if [[ ${filename} =~ src/site || ${filename} =~ src/main/asciidoc ]]; then
+        yetus_debug "tests/mvnsite: ${filename}"
+        add_test mvnsite
+      fi
+    fi
+    # If we change checkstyle configs, run checkstyle
+    if [[ ${filename} =~ checkstyle.*\.xml ]]; then
+      yetus_debug "tests/checkstyle: ${filename}"
+      add_test checkstyle
+    fi
+    # fallback to checking which tests based on what yetus would do by default
+    if declare -f "${BUILDTOOL}_builtin_personality_file_tests" >/dev/null; then
+      "${BUILDTOOL}_builtin_personality_file_tests" "${filename}"
+    elif declare -f builtin_personality_file_tests >/dev/null; then
+      builtin_personality_file_tests "${filename}"
+    fi
   fi
 }
 
@@ -653,23 +675,6 @@ function hbaseanti_patchfile
   return 0
 }
 
-
-## @description  hbase custom mvnsite file filter.  See HBASE-15042
-## @audience     private
-## @stability    evolving
-## @param        filename
-function mvnsite_filefilter
-{
-  local filename=$1
-
-  if [[ ${BUILDTOOL} = maven ]]; then
-    if [[ ${filename} =~ src/site || ${filename} =~ src/main/asciidoc ]]; then
-      yetus_debug "tests/mvnsite: ${filename}"
-      add_test mvnsite
-    fi
-  fi
-}
-
 ## This is named so that yetus will check us right after running tests.
 ## Essentially, we check for normal failures and then we look for zombies.
 #function hbase_unit_logfilter


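Two bash pattern tests carry the whole change: personality_modules widens the module list to the repository root when a checkstyle run includes the hbase-checkstyle module, and personality_file_tests queues a checkstyle run for any changed path matching checkstyle.*\.xml. A standalone sketch of just that matching logic, with the Yetus add_test helper stubbed out and made-up sample paths:

    #!/usr/bin/env bash
    # Stub: in Yetus, add_test queues a test type for the current patch.
    add_test() { echo "queued test: $1"; }

    maybe_queue_checkstyle() {
      local filename=$1
      # Same regex the personality uses: any checkstyle*.xml anywhere in the tree.
      if [[ ${filename} =~ checkstyle.*\.xml ]]; then
        add_test checkstyle
      fi
    }

    maybe_queue_checkstyle "hbase-checkstyle/src/main/resources/hbase/checkstyle.xml"  # queues checkstyle
    maybe_queue_checkstyle "hbase-server/src/main/java/Foo.java"                       # queues nothing

    # Module widening: once hbase-checkstyle is among the changed modules,
    # a checkstyle run covers the whole repository (module ".").
    testtype=checkstyle
    MODULES=(hbase-client hbase-checkstyle)
    if [[ "${testtype}" == checkstyle ]] && [[ "${MODULES[*]}" =~ hbase-checkstyle ]]; then
      MODULES=(.)
    fi
    echo "modules to check: ${MODULES[*]}"   # prints: modules to check: .

Note that the =~ test is an unanchored regex over the space-joined module list, so it would also fire for any module whose name merely contains hbase-checkstyle; with this repository's module names that is harmless.
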

[1/6] hbase git commit: HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

2018-06-14 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1 b86264156 -> 43b51a36d
  refs/heads/branch-1.2 595307f25 -> 20772f139
  refs/heads/branch-1.3 7d51f6077 -> b96cf3da5
  refs/heads/branch-1.4 2a35205fd -> b4f463a5c
  refs/heads/branch-2 bde9f08a8 -> a4cb98b1c
  refs/heads/branch-2.0 987f7b6d3 -> cb2dfd117


HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a4cb98b1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a4cb98b1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a4cb98b1

Branch: refs/heads/branch-2
Commit: a4cb98b1cd482a419a164a8efef7e97f9f26ed11
Parents: bde9f08
Author: Sean Busbey 
Authored: Thu Jun 14 13:00:08 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:26:42 2018 -0500

--
 dev-support/hbase-personality.sh | 51 +++
 1 file changed, 28 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a4cb98b1/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 878f438..c3f5668 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
[Patch body identical to the branch-1 commit above; only hunk offsets and the source index differ.]



[5/6] hbase git commit: HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

2018-06-14 Thread busbey
HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b96cf3da
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b96cf3da
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b96cf3da

Branch: refs/heads/branch-1.3
Commit: b96cf3da5c2253e2183cf31a2b20e0ab22d1dabd
Parents: 7d51f60
Author: Sean Busbey 
Authored: Thu Jun 14 13:00:08 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:27:09 2018 -0500

--
 dev-support/hbase-personality.sh | 51 +++
 1 file changed, 28 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b96cf3da/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 059d7c2..1b313f4 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
[Patch body identical to the branch-1 commit above; only hunk offsets and the source index differ.]



[2/6] hbase git commit: HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

2018-06-14 Thread busbey
HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cb2dfd11
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cb2dfd11
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cb2dfd11

Branch: refs/heads/branch-2.0
Commit: cb2dfd117b1764c3719d184a5ffbf77a7c5949ec
Parents: 987f7b6
Author: Sean Busbey 
Authored: Thu Jun 14 13:00:08 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:26:49 2018 -0500

--
 dev-support/hbase-personality.sh | 51 +++
 1 file changed, 28 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cb2dfd11/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index c616be2..9e10926 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
[Patch body identical to the branch-1 commit above; only hunk offsets and the source index differ.]



[4/6] hbase git commit: HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

2018-06-14 Thread busbey
HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b4f463a5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b4f463a5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b4f463a5

Branch: refs/heads/branch-1.4
Commit: b4f463a5cd37fdbcb519effb6153ae06a70c9b7b
Parents: 2a35205
Author: Sean Busbey 
Authored: Thu Jun 14 13:00:08 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:27:04 2018 -0500

--
 dev-support/hbase-personality.sh | 51 +++
 1 file changed, 28 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b4f463a5/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index bae7163..c1bd8e6 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
[Patch body identical to the branch-1 commit above; only hunk offsets and the source index differ.]



[6/6] hbase git commit: HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

2018-06-14 Thread busbey
HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/20772f13
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/20772f13
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/20772f13

Branch: refs/heads/branch-1.2
Commit: 20772f139d7d60ecbd9228f007c35547fa64a196
Parents: 595307f
Author: Sean Busbey 
Authored: Thu Jun 14 13:00:08 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:27:14 2018 -0500

--
 dev-support/hbase-personality.sh | 51 +++
 1 file changed, 28 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/20772f13/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 059d7c2..1b313f4 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
[Patch body identical to the branch-1 commit above; only hunk offsets and the source index differ.]



hbase git commit: HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

2018-06-14 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/master 0b28155d2 -> 04db90077


HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/04db9007
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/04db9007
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/04db9007

Branch: refs/heads/master
Commit: 04db900772889d70836dbd733f844782fb7adecd
Parents: 0b28155
Author: Sean Busbey 
Authored: Thu Jun 14 13:00:08 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 20:24:29 2018 -0500

--
 dev-support/hbase-personality.sh | 51 +++
 1 file changed, 28 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/04db9007/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 2c6e4a8..8004167 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
[Patch body identical to the branch-1 commit above; only hunk offsets and the source index differ.]


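For exercising the personality locally, it is handed to Apache Yetus test-patch. A minimal sketch, assuming YETUS_HOME points at a Yetus install and using a placeholder patch path (check the flag spellings against your Yetus release):

    # Run Yetus test-patch with the HBase personality against a local patch file.
    "${YETUS_HOME}/bin/test-patch" \
      --personality=dev-support/hbase-personality.sh \
      --basedir="$(pwd)" \
      /tmp/HBASE-20733.patch

With this change in place, a patch touching hbase-checkstyle/src/main/resources/hbase/checkstyle.xml should trigger a checkstyle run over the whole tree rather than only the changed modules.
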

hbase git commit: HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change

2018-06-14 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-20733 [created] 73366c862


HBASE-20733 QABot should run checkstyle tests if the checkstyle configs change


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/73366c86
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/73366c86
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/73366c86

Branch: refs/heads/HBASE-20733
Commit: 73366c8621e8dbe011ed3e3adc16eca61983c2d9
Parents: 0b28155
Author: Sean Busbey 
Authored: Thu Jun 14 13:00:08 2018 -0500
Committer: Sean Busbey 
Committed: Thu Jun 14 19:36:17 2018 -0500

--
 dev-support/hbase-personality.sh | 51 +++
 1 file changed, 28 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/73366c86/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 2c6e4a8..8004167 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
[Patch body identical to the branch-1 commit above; only hunk offsets and the source index differ.]



[27/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
index f236300..513d2ad 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
@@ -26,1048 +26,1115 @@
[Regenerated javadoc source page; the HTML-rendered diff body is omitted as archive noise. The visible source change extends the static-import block of org.apache.hadoop.hbase.backup.impl.BackupCommands with OPTION_BACKUP_LIST_DESC, OPTION_KEEP, OPTION_KEEP_DESC, and OPTION_LIST from BackupRestoreConstants.]
hbase-site git commit: INFRA-10751 Empty commit

2018-06-14 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site e11cf2cba -> 4de5b06b2


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/4de5b06b
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/4de5b06b
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/4de5b06b

Branch: refs/heads/asf-site
Commit: 4de5b06b243c0ebefdeaad2470d771c6fc09f7b7
Parents: e11cf2c
Author: jenkins 
Authored: Thu Jun 14 22:22:39 2018 +
Committer: jenkins 
Committed: Thu Jun 14 22:22:39 2018 +

--

--



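An empty commit like the one above is a routine way to hand downstream automation a new SHA without touching any files. A sketch of how such a commit is produced (the branch name is taken from the email; the remote name is an assumption):

    # --allow-empty bypasses git's usual "nothing to commit" refusal.
    git commit --allow-empty -m "INFRA-10751 Empty commit"
    git push origin asf-site
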

[43/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
index 100d999..2d56475 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
[Regenerated javadoc page; the HTML-rendered diff body is omitted as archive noise. The inherited-member lists from HMaster are refreshed: regionServerTracker no longer appears among the inherited fields, and the inherited-method list is updated to match the HMaster changes in this site build.]
 

[28/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.MergeCommand.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.MergeCommand.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.MergeCommand.html
index f236300..513d2ad 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.MergeCommand.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.MergeCommand.html
@@ -26,1048 +26,1115 @@
[Regenerated javadoc source page; the HTML-rendered diff body is omitted as archive noise. Same BackupCommands static-import additions as the ProgressCommand page above.]
[01/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 436b0b15e -> e11cf2cba


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/downloads.html
--
diff --git a/downloads.html b/downloads.html
index f1c834c..5ba20c9 100644
--- a/downloads.html
+++ b/downloads.html
@@ -7,7 +7,7 @@
 [head-section markup stripped by the archive; the changed line is a generated tag]
 Apache HBase – Apache HBase Downloads
@@ -366,7 +366,7 @@ under the License. -->
 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.
-  Last Published: 2018-06-13
+  Last Published: 2018-06-14
 
 



[41/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
--
[Regenerated javadoc pages; the HTML-rendered diff bodies are omitted as archive noise. Files touched in this part, with the recoverable changes:
 - devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html (enum entries reordered)
 - devapidocs/org/apache/hadoop/hbase/master/class-use/MasterServices.html (HMaster.createServerManager is now protected; the ServerManager(MasterServices, boolean) constructor row is gone)
 - devapidocs/org/apache/hadoop/hbase/master/class-use/RegionServerTracker.html (HMaster.regionServerTracker is now private)
 - devapidocs/org/apache/hadoop/hbase/master/class-use/ServerManager.html (createServerManager is now protected)
 - devapidocs/org/apache/hadoop/hbase/master/package-summary.html (RegionServerTracker description refreshed)
 - devapidocs/org/apache/hadoop/hbase/master/package-tree.html (diff truncated in the archive)]

[39/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.NoneCompressor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.NoneCompressor.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.NoneCompressor.html
new file mode 100644
index 000..8b4cbf7
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.NoneCompressor.html
@@ -0,0 +1,287 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+WALCellCodec.NoneCompressor (Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = {"i0":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+PrevClass
+NextClass
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.regionserver.wal
+Class WALCellCodec.NoneCompressor
+
+java.lang.Object
+  org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.NoneCompressor
+
+All Implemented Interfaces:
+WALCellCodec.ByteStringCompressor
+
+Enclosing class:
+WALCellCodec
+
+static class WALCellCodec.NoneCompressor
+extends Object
+implements WALCellCodec.ByteStringCompressor
+
+
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+NoneCompressor()
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods | Instance Methods | Concrete Methods
+
+Modifier and Type
+Method and Description
+
+
+org.apache.hbase.thirdparty.com.google.protobuf.ByteString
+compress(byte[] data, Enum dictIndex)
+
+
+
+
+
+
+Methods inherited from class java.lang.Object
+clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
+
+
+
+
+
+
+
+
+
+
+
+
+
+Constructor Detail
+
+
+
+
+
+NoneCompressor
+NoneCompressor()
+
+
+
+
+
+
+
+
+
+Method Detail
+
+
+
+
+
+compress
+public org.apache.hbase.thirdparty.com.google.protobuf.ByteString compress(byte[] data, Enum dictIndex)

[35/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
index f236300..513d2ad 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
@@ -26,1048 +26,1115 @@
 018
 019package org.apache.hadoop.hbase.backup.impl;
 020
-021import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
-022import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-024import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
-025import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-026import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-027import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-028import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-029import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-030import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-031import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-032import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-033import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-034import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-035import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-036import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-037import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-038import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-039
-040import java.io.IOException;
-041import java.net.URI;
-042import java.util.List;
+021import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BACKUP_LIST_DESC;
+022import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
+023import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
+024import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+025import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+026import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP;
+027import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP_DESC;
+028import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_LIST;
+029import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+030import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+031import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+032import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+033import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+034import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+035import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+036import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+037import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+038import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+039import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+040import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+041import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+042import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 043
-044import org.apache.commons.lang3.StringUtils;
-045import org.apache.hadoop.conf.Configuration;
-046import org.apache.hadoop.conf.Configured;
-047import org.apache.hadoop.fs.FileSystem;
-048import org.apache.hadoop.fs.Path;
-049import org.apache.hadoop.hbase.HBaseConfiguration;
-050import org.apache.hadoop.hbase.TableName;
-051import org.apache.hadoop.hbase.backup.BackupAdmin;
-052import org.apache.hadoop.hbase.backup.BackupInfo;
-053import 
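The newly imported OPTION_KEEP, OPTION_LIST and OPTION_BACKUP_LIST_DESC names show the delete command growing an age-based and an explicit-list mode. A hedged sketch of how such option constants are conventionally declared; only the names come from this diff, the flag letters and wording below are assumptions:

  interface BackupRestoreConstantsSketch {
    // Names are from the diff above; values and descriptions are assumed, not confirmed.
    String OPTION_KEEP = "k";
    String OPTION_KEEP_DESC = "Number of days to keep the backups";
    String OPTION_LIST = "l";
    String OPTION_BACKUP_LIST_DESC = "Comma-separated list of backup ids";
  }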

[50/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index edb1ed4..74c8741 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -284,7 +284,7 @@
 3640
 0
 0
-16197
+16190
 
 Files
 
@@ -914,50 +914,60 @@
 0
 2
 
-org/apache/hadoop/hbase/backup/FailedArchiveException.java
+org/apache/hadoop/hbase/backup/BackupDriver.java
 0
 0
 1
 
+org/apache/hadoop/hbase/backup/FailedArchiveException.java
+0
+0
+1
+
 org/apache/hadoop/hbase/backup/HFileArchiver.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/backup/TestBackupMerge.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/TestHFileArchiving.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/backup/example/ZKTableArchiveClient.java
 0
 0
 2
+
+org/apache/hadoop/hbase/backup/impl/BackupCommands.java
+0
+0
+1
 
 org/apache/hadoop/hbase/backup/impl/BackupManager.java
 0
@@ -4769,5216 +4779,5211 @@
 0
 29
 
-org/apache/hadoop/hbase/master/RegionServerTracker.java
-0
-0
-4
-
 org/apache/hadoop/hbase/master/RegionState.java
 0
 0
 86
-
+
 org/apache/hadoop/hbase/master/ServerListener.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/ServerManager.java
 0
 0
-24
-
+22
+
 org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/master/SnapshotSentinel.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/master/SplitLogManager.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/master/SplitOrMergeTracker.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/TableNamespaceManager.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/master/TableStateManager.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/master/TestActiveMasterManager.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/master/TestCatalogJanitor.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/master/TestHMasterCommandLine.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/TestMaster.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/TestMasterFailoverBalancerPersistence.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/master/TestMasterFileSystem.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/TestMasterFileSystemWithWALDir.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/TestMasterMetrics.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/TestMasterNoCluster.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/master/TestMasterStatusServlet.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/master/TestMasterTransitions.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/master/TestMirroringTableStateManager.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/master/TestRegionPlacement.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/master/TestRegionPlacement2.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/master/TestRollingRestart.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/TestSplitLogManager.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/master/TestTableStateManager.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/master/TestWarmupRegion.java
 0
 0
 19
-
+
 org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
 0
 0
 32
-
+
 org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java
 0
 0
 16
-
+
 org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java
 0
 0
 32
-
+
 org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/assignment/RegionStateStore.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/master/assignment/RegionStates.java
 0
 0
 26
-
+
 org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/assignment/TestRegionStates.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
 0
 0
 2
-
+
 

[30/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
index f236300..513d2ad 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
@@ -26,1048 +26,1115 @@
 [duplicate hunk omitted: identical to the import-reordering hunk shown above for
 BackupCommands.BackupSetCommand.html (same blob pair f236300..513d2ad), truncated at the same point]

[26/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.RepairCommand.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.RepairCommand.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.RepairCommand.html
index f236300..513d2ad 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.RepairCommand.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.RepairCommand.html
@@ -26,1048 +26,1115 @@
 [duplicate hunk omitted: identical to the import-reordering hunk shown above for
 BackupCommands.BackupSetCommand.html (same blob pair f236300..513d2ad), truncated at the same point]

[36/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupDriver.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupDriver.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupDriver.html
index 65c1a1f..2210003 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupDriver.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupDriver.html
@@ -25,201 +25,206 @@
 017 */
 018package org.apache.hadoop.hbase.backup;
 019
-020import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
-021import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-022import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-023import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
-024import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-025import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-026import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-027import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-028import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-029import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-030import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-031import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-032import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-033import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-034import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-035import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-036
-037import java.io.IOException;
-038import java.net.URI;
-039
-040import org.apache.hadoop.conf.Configuration;
-041import org.apache.hadoop.fs.Path;
-042import org.apache.hadoop.hbase.HBaseConfiguration;
-043import org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
-044import org.apache.hadoop.hbase.backup.impl.BackupCommands;
-045import org.apache.hadoop.hbase.backup.impl.BackupManager;
-046import org.apache.hadoop.hbase.util.AbstractHBaseTool;
-047import org.apache.hadoop.hbase.util.FSUtils;
-048import org.apache.hadoop.util.ToolRunner;
-049import org.apache.log4j.Level;
-050import org.apache.log4j.LogManager;
-051import org.apache.yetus.audience.InterfaceAudience;
-052import org.slf4j.Logger;
-053import org.slf4j.LoggerFactory;
-054
-055import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
-056
-057/**
-058 *
-059 * Command-line entry point for backup operation
-060 *
-061 */
-062@InterfaceAudience.Private
-063public class BackupDriver extends AbstractHBaseTool {
-064
-065  private static final Logger LOG = LoggerFactory.getLogger(BackupDriver.class);
-066  private CommandLine cmd;
+020import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BACKUP_LIST_DESC;
+021import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
+022import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
+023import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+024import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+025import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP;
+026import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP_DESC;
+027import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_LIST;
+028import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+029import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+030import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+031import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+032import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+033import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+034import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+035import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+036import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+037import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+038import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+039import static 
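Because BackupDriver extends AbstractHBaseTool, it can be driven through Hadoop's ToolRunner as well as from the hbase shell script. A hedged sketch of invoking the reworked delete command programmatically; the "-k" flag string is assumed from OPTION_KEEP above and may differ from the shipped help text:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.backup.BackupDriver;
  import org.apache.hadoop.util.ToolRunner;

  public class DeleteOldBackupsSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      // Assumed semantics: "delete -k 30" removes backups older than 30 days.
      int rc = ToolRunner.run(conf, new BackupDriver(), new String[] { "delete", "-k", "30" });
      System.exit(rc);
    }
  }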

[16/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrContext.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrContext.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrContext.html
index fdc5a8a..62e604e 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrContext.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrContext.html
@@ -320,141 +320,143 @@
 312    this.cellDecoder = codec.getDecoder(this.inputStream);
 313    if (this.hasCompression) {
 314      this.byteStringUncompressor = codec.getByteStringUncompressor();
-315    }
-316  }
-317
-318  @Override
-319  protected boolean hasCompression() {
-320    return this.hasCompression;
-321  }
-322
-323  @Override
-324  protected boolean hasTagCompression() {
-325    return this.hasTagCompression;
-326  }
-327
-328  @Override
-329  protected boolean readNext(Entry entry) throws IOException {
-330    while (true) {
-331      // OriginalPosition might be < 0 on local fs; if so, it is useless to us.
-332      long originalPosition = this.inputStream.getPos();
-333      if (trailerPresent && originalPosition > 0 && originalPosition == this.walEditsStopOffset) {
-334        if (LOG.isTraceEnabled()) {
-335          LOG.trace("Reached end of expected edits area at offset " + originalPosition);
-336        }
-337        return false;
-338      }
-339      WALKey.Builder builder = WALKey.newBuilder();
-340      long size = 0;
-341      try {
-342        long available = -1;
-343        try {
-344          int firstByte = this.inputStream.read();
-345          if (firstByte == -1) {
-346            throw new EOFException("First byte is negative at offset " + originalPosition);
-347          }
-348          size = CodedInputStream.readRawVarint32(firstByte, this.inputStream);
-349          // available may be < 0 on local fs for instance.  If so, can't depend on it.
-350          available = this.inputStream.available();
-351          if (available > 0 && available < size) {
-352            throw new EOFException("Available stream not enough for edit, " +
-353                "inputStream.available()= " + this.inputStream.available() + ", " +
-354                "entry size= " + size + " at offset = " + this.inputStream.getPos());
-355          }
-356          ProtobufUtil.mergeFrom(builder, ByteStreams.limit(this.inputStream, size),
-357            (int)size);
-358        } catch (InvalidProtocolBufferException ipbe) {
-359          throw (EOFException) new EOFException("Invalid PB, EOF? Ignoring; originalPosition=" +
-360            originalPosition + ", currentPosition=" + this.inputStream.getPos() +
-361            ", messageSize=" + size + ", currentAvailable=" + available).initCause(ipbe);
-362        }
-363        if (!builder.isInitialized()) {
-364          // TODO: not clear if we should try to recover from corrupt PB that looks semi-legit.
-365          //   If we can get the KV count, we could, theoretically, try to get next record.
-366          throw new EOFException("Partial PB while reading WAL, " +
-367              "probably an unexpected EOF, ignoring. current offset=" + this.inputStream.getPos());
-368        }
-369        WALKey walKey = builder.build();
-370        entry.getKey().readFieldsFromPb(walKey, this.byteStringUncompressor);
-371        if (!walKey.hasFollowingKvCount() || 0 == walKey.getFollowingKvCount()) {
-372          if (LOG.isTraceEnabled()) {
-373            LOG.trace("WALKey has no KVs that follow it; trying the next one. current offset=" +
-374                this.inputStream.getPos());
-375          }
-376          continue;
-377        }
-378        int expectedCells = walKey.getFollowingKvCount();
-379        long posBefore = this.inputStream.getPos();
-380        try {
-381          int actualCells = entry.getEdit().readFromCells(cellDecoder, expectedCells);
-382          if (expectedCells != actualCells) {
-383            throw new EOFException("Only read " + actualCells); // other info added in catch
-384          }
-385        } catch (Exception ex) {
-386          String posAfterStr = "unknown";
-387          try {
-388            posAfterStr = this.inputStream.getPos() + "";
-389          } catch (Throwable t) {
-390            if (LOG.isTraceEnabled()) {
-391              LOG.trace("Error getting pos for error message - ignoring", t);
-392            }
-393          }
-394          String message = " while reading " + expectedCells + " WAL KVs; started reading at "
-395              + posBefore + " and read up to " + posAfterStr;
-396          IOException realEofEx = extractHiddenEof(ex);
-397          throw (EOFException) new 
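readNext above follows the classic length-delimited protobuf pattern: read one byte, decode a raw varint length from it, then read exactly that many message bytes. A self-contained sketch of that pattern using the shaded protobuf's CodedInputStream; the helper name is illustrative, not from the HBase source:

  import java.io.EOFException;
  import java.io.IOException;
  import java.io.InputStream;
  import org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream;

  final class DelimitedReadSketch {
    // Read a varint length prefix, then the full message body, or fail with EOF.
    static byte[] readDelimited(InputStream in) throws IOException {
      int firstByte = in.read();
      if (firstByte == -1) {
        throw new EOFException("stream ended before the length prefix");
      }
      int size = CodedInputStream.readRawVarint32(firstByte, in);
      byte[] buf = new byte[size];
      int off = 0;
      while (off < size) {                      // loop until the whole message arrives
        int n = in.read(buf, off, size - off);
        if (n < 0) {
          throw new EOFException("partial message: " + off + "/" + size + " bytes");
        }
        off += n;
      }
      return buf;
    }
  }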

[15/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.html
index 64099d9..2cafba9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.html
@@ -55,82 +55,81 @@
 047
 048  @Override
 049  public void append(Entry entry) throws IOException {
-050    entry.setCompressionContext(compressionContext);
-051    entry.getKey().getBuilder(compressor).
-052      setFollowingKvCount(entry.getEdit().size()).build().writeDelimitedTo(output);
-053    for (Cell cell : entry.getEdit().getCells()) {
-054      // cellEncoder must assume little about the stream, since we write PB and cells in turn.
-055      cellEncoder.write(cell);
-056    }
-057    length.set(output.getPos());
-058  }
-059
-060  @Override
-061  public void close() throws IOException {
-062    if (this.output != null) {
-063      try {
-064        if (!trailerWritten) writeWALTrailer();
-065        this.output.close();
-066      } catch (NullPointerException npe) {
-067        // Can get a NPE coming up from down in DFSClient$DFSOutputStream#close
-068        LOG.warn(npe.toString(), npe);
-069      }
-070      this.output = null;
-071    }
-072  }
-073
-074  @Override
-075  public void sync(boolean forceSync) throws IOException {
-076    FSDataOutputStream fsdos = this.output;
-077    if (fsdos == null) return; // Presume closed
-078    fsdos.flush();
-079    if (forceSync) {
-080      fsdos.hsync();
-081    } else {
-082      fsdos.hflush();
-083    }
-084  }
-085
-086  public FSDataOutputStream getStream() {
-087    return this.output;
-088  }
-089
-090  @SuppressWarnings("deprecation")
-091  @Override
-092  protected void initOutput(FileSystem fs, Path path, boolean overwritable, int bufferSize,
-093      short replication, long blockSize) throws IOException, StreamLacksCapabilityException {
-094    this.output = fs.createNonRecursive(path, overwritable, bufferSize, replication, blockSize,
-095      null);
-096    if (fs.getConf().getBoolean(CommonFSUtils.UNSAFE_STREAM_CAPABILITY_ENFORCE, true)) {
-097      if (!CommonFSUtils.hasCapability(output, "hflush")) {
-098        throw new StreamLacksCapabilityException("hflush");
-099      }
-100      if (!CommonFSUtils.hasCapability(output, "hsync")) {
-101        throw new StreamLacksCapabilityException("hsync");
-102      }
-103    }
-104  }
-105
-106  @Override
-107  protected long writeMagicAndWALHeader(byte[] magic, WALHeader header) throws IOException {
-108    output.write(magic);
-109    header.writeDelimitedTo(output);
-110    return output.getPos();
-111  }
-112
-113  @Override
-114  protected OutputStream getOutputStreamForCellEncoder() {
-115    return this.output;
-116  }
-117
-118  @Override
-119  protected long writeWALTrailerAndMagic(WALTrailer trailer, byte[] magic) throws IOException {
-120    trailer.writeTo(output);
-121    output.writeInt(trailer.getSerializedSize());
-122    output.write(magic);
-123    return output.getPos();
-124  }
-125}
+050    entry.getKey().getBuilder(compressor).
+051      setFollowingKvCount(entry.getEdit().size()).build().writeDelimitedTo(output);
+052..+093: [remainder identical to the removed lines above, shifted up one line after the
+          setCompressionContext call was dropped; duplicate body omitted, message truncated at +093]
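The sync(boolean) implementation above distinguishes hflush from hsync. A hedged sketch isolating that choice, with the usual HDFS semantics (hflush makes data visible to new readers; hsync additionally asks the datanodes to persist to disk):

  import java.io.IOException;
  import org.apache.hadoop.fs.FSDataOutputStream;

  final class WalSyncSketch {
    // Mirrors the flush-vs-durable-sync decision in sync(boolean) above.
    static void sync(FSDataOutputStream out, boolean forceSync) throws IOException {
      if (out == null) {
        return;                 // presume the writer is already closed
      }
      out.flush();              // drain local buffers first
      if (forceSync) {
        out.hsync();            // ask datanodes to persist to disk
      } else {
        out.hflush();           // make data visible to new readers
      }
    }
  }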

[46/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
index 96d3860..bddfe7e 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class BackupCommands.DeleteCommand
+public static class BackupCommands.DeleteCommand
 extends BackupCommands.Command
 
 
@@ -181,14 +181,26 @@ extends Method and Description
 
 
+private String[]
+convertToBackupIds(List<BackupInfo> history)
+
+
 void
 execute()
 
-
+
+private void
+executeDeleteListOfBackups(org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine cmdline)
+
+
+private void
+executeDeleteOlderThan(org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine cmdline)
+
+
 protected void
 printUsage()
 
-
+
 protected boolean
 requiresNoActiveSession()
 The command can't be run if active backup session is in 
progress
@@ -236,7 +248,7 @@ extends 
 
 DeleteCommand
-DeleteCommand(org.apache.hadoop.conf.Configuration conf,
+DeleteCommand(org.apache.hadoop.conf.Configuration conf,
               org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine cmdline)
org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLinecmdline)
 
 
@@ -254,7 +266,7 @@ extends 
 
 requiresNoActiveSession
-protected boolean requiresNoActiveSession()
+protected boolean requiresNoActiveSession()
 Description copied from 
class:BackupCommands.Command
 The command can't be run if active backup session is in 
progress
 
@@ -271,7 +283,7 @@ extends 
 
 execute
-public void execute()
+public void execute()
  throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Overrides:
@@ -281,13 +293,50 @@ extends 
+
+
+
+
+executeDeleteOlderThan
+private void executeDeleteOlderThan(org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine cmdline)
+                             throws IOException
+
+Throws:
+IOException
+
+
+convertToBackupIds
+private String[] convertToBackupIds(List<BackupInfo> history)
+
+
+executeDeleteListOfBackups
+private void executeDeleteListOfBackups(org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine cmdline)
+                             throws IOException
+
+Throws:
+IOException
+
+
+
 
 
 
 
 
 printUsage
-protected void printUsage()
+protected void printUsage()
 
 Specified by:
 printUsage in
 class BackupCommands.Command
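Taken together, the new private methods reveal the delete command's two modes. A hedged sketch of how the older-than path can be wired against the public BackupAdmin API; convertToBackupIds mirrors the summary above, while the filtering logic is an assumption, not the shipped implementation:

  import java.io.IOException;
  import java.util.ArrayList;
  import java.util.List;
  import org.apache.hadoop.hbase.backup.BackupAdmin;
  import org.apache.hadoop.hbase.backup.BackupInfo;

  class DeleteOlderThanSketch {
    // Mirrors convertToBackupIds(List<BackupInfo>) from the method summary above.
    private String[] convertToBackupIds(List<BackupInfo> history) {
      String[] ids = new String[history.size()];
      for (int i = 0; i < history.size(); i++) {
        ids[i] = history.get(i).getBackupId();
      }
      return ids;
    }

    // Assumed flow: select backups whose start time precedes the cutoff,
    // then hand their ids to BackupAdmin.deleteBackups.
    void deleteOlderThan(BackupAdmin admin, List<BackupInfo> history, long cutoffTs)
        throws IOException {
      List<BackupInfo> old = new ArrayList<>();
      for (BackupInfo info : history) {
        if (info.getStartTs() < cutoffTs) {
          old.add(info);
        }
      }
      admin.deleteBackups(convertToBackupIds(old));
    }
  }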

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
index dcfda4e..c150e20 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class BackupCommands.DescribeCommand
+public static class BackupCommands.DescribeCommand
 extends BackupCommands.Command
 
 
@@ -230,7 +230,7 @@ extends 
 
 DescribeCommand

[32/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
index f236300..513d2ad 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
@@ -26,1048 +26,1115 @@
 [duplicate hunk omitted: identical to the import-reordering hunk shown above for
 BackupCommands.BackupSetCommand.html (same blob pair f236300..513d2ad), truncated at the same point]

[12/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html
index 83c17c0..9df0225 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html
@@ -54,323 +54,362 @@
 046import org.apache.hadoop.io.IOUtils;
 047
 048import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
-049
+049import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 050
-051/**
-052 * Compression in this class is lifted off Compressor/KeyValueCompression.
-053 * This is a pure coincidence... they are independent and don't have to be compatible.
-054 *
-055 * This codec is used at server side for writing cells to WAL as well as for sending edits
-056 * as part of the distributed splitting process.
-057 */
-058@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
-059  HBaseInterfaceAudience.PHOENIX, HBaseInterfaceAudience.CONFIG})
-060public class WALCellCodec implements Codec {
-061  /** Configuration key for the class to use when encoding cells in the WAL */
-062  public static final String WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
-063
-064  protected final CompressionContext compression;
-065  protected final ByteStringUncompressor statelessUncompressor = new ByteStringUncompressor() {
-066    @Override
-067    public byte[] uncompress(ByteString data, Dictionary dict) throws IOException {
-068      return WALCellCodec.uncompressByteString(data, dict);
-069    }
-070  };
-071
-072  /**
-073   * All subclasses must implement a no argument constructor
-074   */
-075  public WALCellCodec() {
-076    this.compression = null;
-077  }
-078
-079  /**
-080   * Default constructor - all subclasses must implement a constructor with this signature
-081   * if they are to be dynamically loaded from the {@link Configuration}.
-082   * @param conf configuration to configure this
-083   * @param compression compression the codec should support, can be null to indicate no
-084   *          compression
-085   */
-086  public WALCellCodec(Configuration conf, CompressionContext compression) {
-087    this.compression = compression;
-088  }
-089
-090  public static String getWALCellCodecClass(Configuration conf) {
-091    return conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
-092  }
-093
-094  /**
-095   * Create and setup a {@link WALCellCodec} from the {@code cellCodecClsName} and
-096   * CompressionContext, if {@code cellCodecClsName} is specified.
-097   * Otherwise Cell Codec classname is read from {@link Configuration}.
-098   * Fully prepares the codec for use.
-099   * @param conf {@link Configuration} to read for the user-specified codec. If none is specified,
-100   *          uses a {@link WALCellCodec}.
-101   * @param cellCodecClsName name of codec
-102   * @param compression compression the codec should use
-103   * @return a {@link WALCellCodec} ready for use.
-104   * @throws UnsupportedOperationException if the codec cannot be instantiated
-105   */
-106
-107  public static WALCellCodec create(Configuration conf, String cellCodecClsName,
-108      CompressionContext compression) throws UnsupportedOperationException {
-109    if (cellCodecClsName == null) {
-110      cellCodecClsName = getWALCellCodecClass(conf);
-111    }
-112    return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-113        { Configuration.class, CompressionContext.class }, new Object[] { conf, compression });
-114  }
-115
-116  /**
-117   * Create and setup a {@link WALCellCodec} from the
-118   * CompressionContext.
-119   * Cell Codec classname is read from {@link Configuration}.
-120   * Fully prepares the codec for use.
-121   * @param conf {@link Configuration} to read for the user-specified codec. If none is specified,
-122   *          uses a {@link WALCellCodec}.
-123   * @param compression compression the codec should use
-124   * @return a {@link WALCellCodec} ready for use.
-125   * @throws UnsupportedOperationException if the codec cannot be instantiated
-126   */
-127  public static WALCellCodec create(Configuration conf,
-128      CompressionContext compression) throws UnsupportedOperationException {
-129    String cellCodecClsName = getWALCellCodecClass(conf);
-130    return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-131        { Configuration.class, CompressionContext.class }, new Object[] { conf, 
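The WAL_CELL_CODEC_CLASS_KEY constant above ("hbase.regionserver.wal.codec") is the hook for plugging in a custom codec; create() then instantiates it reflectively through the (Configuration, CompressionContext) constructor. A hedged configuration sketch, where "com.example.MyWALCellCodec" is a placeholder class name:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;

  public class CodecConfigSketch {
    public static void main(String[] args) {
      Configuration conf = HBaseConfiguration.create();
      // The key matches WAL_CELL_CODEC_CLASS_KEY in the source above;
      // the class name is hypothetical and must provide the
      // (Configuration, CompressionContext) constructor to be loadable.
      conf.set("hbase.regionserver.wal.codec", "com.example.MyWALCellCodec");
    }
  }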
[05/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.Entry.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.Entry.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.Entry.html
index e31f5c6..f4d1eb0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.Entry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.Entry.html
@@ -31,277 +31,266 @@
 023import java.util.Set;
 024import 
org.apache.hadoop.hbase.HConstants;
 025import 
org.apache.hadoop.hbase.client.RegionInfo;
-026import 
org.apache.hadoop.hbase.regionserver.wal.CompressionContext;
-027import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
-028import 
org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
-029import 
org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
-030import 
org.apache.hadoop.hbase.replication.regionserver.WALFileLengthProvider;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;

import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;

/**
 * A Write Ahead Log (WAL) provides service for reading and writing WAL edits. This interface
 * provides APIs for WAL users (such as RegionServer) to use the WAL (do append, sync, etc).
 *
 * Note that some internals, such as log rolling and performance evaluation tools, will use
 * WAL.equals to determine if they have already seen a given WAL.
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving
public interface WAL extends Closeable, WALFileLengthProvider {

  /**
   * Registers WALActionsListener
   */
  void registerWALActionsListener(final WALActionsListener listener);

  /**
   * Unregisters WALActionsListener
   */
  boolean unregisterWALActionsListener(final WALActionsListener listener);

  /**
   * Roll the log writer. That is, start writing log messages to a new file.
   * <p>
   * The implementation is synchronized in order to make sure there's one rollWriter
   * running at any given time.
   * @return If lots of logs, flush the returned regions so next time through we
   *         can clean logs. Returns null if nothing to flush. Names are actual
   *         region names as returned by {@link RegionInfo#getEncodedName()}
   */
  byte[][] rollWriter() throws FailedLogCloseException, IOException;

  /**
   * Roll the log writer. That is, start writing log messages to a new file.
   * <p>
   * The implementation is synchronized in order to make sure there's one rollWriter
   * running at any given time.
   * @param force If true, force creation of a new writer even if no entries have
   *          been written to the current writer
   * @return If lots of logs, flush the returned regions so next time through we
   *         can clean logs. Returns null if nothing to flush. Names are actual
   *         region names as returned by {@link RegionInfo#getEncodedName()}
   */
  byte[][] rollWriter(boolean force) throws FailedLogCloseException, IOException;

  /**
   * Stop accepting new writes. If we have unsynced writes still in buffer, sync them.
   * Extant edits are left in place in backing storage to be replayed later.
   */
  void shutdown() throws IOException;

  /**
   * Caller no longer needs any edits from this WAL. Implementers are free to reclaim
   * underlying resources after this call; i.e. filesystem based WALs can archive or
   * delete files.
   */
  @Override
  void close() throws IOException;

  /**
   * Append a set of edits to the WAL. The WAL is not flushed/sync'd after this transaction
   * completes BUT on return this edit must have its region edit/sequence id assigned
   * else it messes up our unification of mvcc and sequenceid. On return <code>key</code> will
   * have the region edit/sequence id filled in.
   * @param info the regioninfo associated with append
   * @param key Modified by this call; we add to it this edit's region edit/sequence id.
   * @param edits Edits to append. MAY CONTAIN NO EDITS for case where we want to get an edit
   *          sequence id that is after all currently appended edits.
   * @param inMemstore Always true except for case where we are writing a compaction completion
   *          record into the WAL; in this case the entry is just so we can finish an unfinished
   *          compaction -- it is not an edit for memstore.
   * @return Returns a 'transaction id' and <code>key</code> will have the region edit/sequence
   *         id in it.
   */
  long append(RegionInfo info, WALKeyImpl key, WALEdit edits, boolean inMemstore)
      throws IOException;
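A minimal caller-side sketch of the append/sync/mvcc contract described above. This is not
HBase's actual HRegion write path; it assumes an already-constructed WAL, RegionInfo,
WALKeyImpl, WALEdit and MultiVersionConcurrencyControl, and only shows the ordering the
javadoc requires (append assigns the sequence id, sync makes it durable, and the MVCC write
entry must be completed either way):

import java.io.IOException;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKeyImpl;

final class WalAppendSketch {
  /** Append one edit, make it durable, then complete the MVCC transaction. */
  static long appendAndSync(WAL wal, RegionInfo region, WALKeyImpl key, WALEdit edits,
      MultiVersionConcurrencyControl mvcc) throws IOException {
    long txid = wal.append(region, key, edits, true); // on return, key has its sequence id
    try {
      wal.sync(txid); // no-op if this txid was already sync'd
    } finally {
      // Release the write entry; real server code may instead abort on a sync failure.
      mvcc.complete(key.getWriteEntry());
    }
    return txid;
  }
}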

[37/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/wal/WAL.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/WAL.html 
b/devapidocs/org/apache/hadoop/hbase/wal/WAL.html
index aa07f0a..f0637de 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/WAL.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/WAL.html
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";

 @InterfaceAudience.Private
 @InterfaceStability.Evolving
-public interface WAL
+public interface WAL
 extends Closeable, WALFileLengthProvider

 A Write Ahead Log (WAL) provides service for reading and writing WAL edits. This interface
 provides APIs for WAL users (such as RegionServer) to use the WAL (do append, sync, etc).

@@ -318,7 +318,7 @@

 registerWALActionsListener
-void registerWALActionsListener(WALActionsListener listener)
+void registerWALActionsListener(WALActionsListener listener)
 Registers WALActionsListener

@@ -328,7 +328,7 @@

 unregisterWALActionsListener
-boolean unregisterWALActionsListener(WALActionsListener listener)
+boolean unregisterWALActionsListener(WALActionsListener listener)
 Unregisters WALActionsListener

@@ -338,7 +338,7 @@

 rollWriter
-byte[][] rollWriter()
+byte[][] rollWriter()
          throws FailedLogCloseException,
                 IOException
 Roll the log writer. That is, start writing log messages to a new file.

@@ -363,7 +363,7 @@

 rollWriter
-byte[][] rollWriter(boolean force)
+byte[][] rollWriter(boolean force)
          throws FailedLogCloseException,
                 IOException
 Roll the log writer. That is, start writing log messages to a new file.

@@ -391,7 +391,7 @@

 shutdown
-void shutdown()
+void shutdown()
       throws IOException
 Stop accepting new writes. If we have unsynced writes still in buffer, sync them.
 Extant edits are left in place in backing storage to be replayed later.

@@ -407,7 +407,7 @@

 close
-void close()
+void close()
     throws IOException
 Caller no longer needs any edits from this WAL. Implementers are free to reclaim
 underlying resources after this call; i.e. filesystem based WALs can archive or delete files.

@@ -428,7 +428,7 @@

 append
-long append(RegionInfo info,
+long append(RegionInfo info,
             WALKeyImpl key,
             WALEdit edits,
             boolean inMemstore)

@@ -460,7 +460,7 @@

 updateStore
-void updateStore(byte[] encodedRegionName,
+void updateStore(byte[] encodedRegionName,
                  byte[] familyName,
                  Long sequenceid,
                  boolean onlyIfGreater)

@@ -482,7 +482,7 @@

 sync
-void sync()
+void sync()
     throws IOException
 Sync what we have in the WAL.

@@ -497,7 +497,7 @@

 sync
-void sync(long txid)
+void sync(long txid)
     throws IOException
 Sync the WAL if the txId was not already sync'd.

@@ -514,7 +514,7 @@

 sync
-default void sync(boolean forceSync)
+default void sync(boolean forceSync)
     throws IOException

 Parameters:
@@ -531,7 +531,7 @@
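The rollWriter() return contract above implies a roll-then-flush loop on the caller's side:
flush the regions whose edits are pinning old WAL files so the next roll can archive them. A
hedged sketch of that shape (flushRegion is a hypothetical stand-in, not part of the WAL API;
the real loop lives in the RegionServer's log roller):

import java.io.IOException;
import org.apache.hadoop.hbase.wal.WAL;

final class RollSketch {
  /** Roll the writer, then flush whatever regions are keeping old WAL files alive. */
  static void rollAndFlush(WAL wal) throws IOException {
    byte[][] regionsToFlush = wal.rollWriter(); // null means nothing pins old logs
    if (regionsToFlush != null) {
      for (byte[] encodedRegionName : regionsToFlush) {
        flushRegion(encodedRegionName); // hypothetical helper
      }
    }
  }

  private static void flushRegion(byte[] encodedRegionName) {
    // In a real RegionServer this would request a memstore flush for the region.
  }
}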

[10/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvEncoder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvEncoder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvEncoder.html
index 83c17c0..9df0225 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvEncoder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvEncoder.html
@@ -54,323 +54,362 @@
 import org.apache.hadoop.io.IOUtils;

 import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
+import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;

-/**
- * Compression in this class is lifted off Compressor/KeyValueCompression.
- * This is a pure coincidence... they are independent and don't have to be compatible.
- *
- * This codec is used at server side for writing cells to WAL as well as for sending edits
- * as part of the distributed splitting process.
- */
-@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
-  HBaseInterfaceAudience.PHOENIX, HBaseInterfaceAudience.CONFIG})
-public class WALCellCodec implements Codec {
-  /** Configuration key for the class to use when encoding cells in the WAL */
-  public static final String WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
-
-  protected final CompressionContext compression;
-  protected final ByteStringUncompressor statelessUncompressor = new ByteStringUncompressor() {
-    @Override
-    public byte[] uncompress(ByteString data, Dictionary dict) throws IOException {
-      return WALCellCodec.uncompressByteString(data, dict);
-    }
-  };
-
-  /**
-   * <b>All subclasses must implement a no argument constructor</b>
-   */
-  public WALCellCodec() {
-    this.compression = null;
-  }
-
-  /**
-   * Default constructor - <b>all subclasses must implement a constructor with this signature</b>
-   * if they are to be dynamically loaded from the {@link Configuration}.
-   * @param conf configuration to configure <tt>this</tt>
-   * @param compression compression the codec should support, can be <tt>null</tt> to indicate no
-   *          compression
-   */
-  public WALCellCodec(Configuration conf, CompressionContext compression) {
-    this.compression = compression;
-  }
-
-  public static String getWALCellCodecClass(Configuration conf) {
-    return conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
-  }
-
-  /**
-   * Create and setup a {@link WALCellCodec} from the {@code cellCodecClsName} and
-   * CompressionContext, if {@code cellCodecClsName} is specified.
-   * Otherwise Cell Codec classname is read from {@link Configuration}.
-   * Fully prepares the codec for use.
-   * @param conf {@link Configuration} to read for the user-specified codec. If none is
-   *          specified, uses a {@link WALCellCodec}.
-   * @param cellCodecClsName name of codec
-   * @param compression compression the codec should use
-   * @return a {@link WALCellCodec} ready for use.
-   * @throws UnsupportedOperationException if the codec cannot be instantiated
-   */
-  public static WALCellCodec create(Configuration conf, String cellCodecClsName,
-      CompressionContext compression) throws UnsupportedOperationException {
-    if (cellCodecClsName == null) {
-      cellCodecClsName = getWALCellCodecClass(conf);
-    }
-    return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-        { Configuration.class, CompressionContext.class }, new Object[] { conf, compression });
-  }
-
-  /**
-   * Create and setup a {@link WALCellCodec} from the CompressionContext.
-   * Cell Codec classname is read from {@link Configuration}.
-   * Fully prepares the codec for use.
-   * @param conf {@link Configuration} to read for the user-specified codec. If none is
-   *          specified, uses a {@link WALCellCodec}.
-   * @param compression compression the codec should use
-   * @return a {@link WALCellCodec} ready for use.
-   * @throws UnsupportedOperationException if the codec cannot be instantiated
-   */
-  public static WALCellCodec create(Configuration conf,
-      CompressionContext compression) throws UnsupportedOperationException {
-    String cellCodecClsName = getWALCellCodecClass(conf);
-    return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-        { Configuration.class, CompressionContext.class }, new Object[] { conf, compression });
-  }
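The two create(...) overloads above reduce to: pick the codec class name, either passed in
explicitly or read from hbase.regionserver.wal.codec (defaulting to WALCellCodec itself),
then reflectively invoke the (Configuration, CompressionContext) constructor. A small sketch
of obtaining the default codec with compression disabled (null context):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;

final class CodecSketch {
  static WALCellCodec defaultCodec() {
    Configuration conf = HBaseConfiguration.create();
    // null class name -> resolve from hbase.regionserver.wal.codec;
    // null CompressionContext -> no WAL compression.
    return WALCellCodec.create(conf, null, null);
  }
}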

[31/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
index f236300..513d2ad 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
@@ -26,1048 +26,1115 @@
 018
 019package 
org.apache.hadoop.hbase.backup.impl;
 020
-021import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
-022import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
-025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-039
-040import java.io.IOException;
-041import java.net.URI;
-042import java.util.List;
+021import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BACKUP_LIST_DESC;
+022import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
+023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
+024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP;
+027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP_DESC;
+028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_LIST;
+029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+039import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+040import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+041import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+042import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 043
-044import 
org.apache.commons.lang3.StringUtils;
-045import 
org.apache.hadoop.conf.Configuration;
-046import 
org.apache.hadoop.conf.Configured;
-047import org.apache.hadoop.fs.FileSystem;
-048import org.apache.hadoop.fs.Path;
-049import 
org.apache.hadoop.hbase.HBaseConfiguration;
-050import 
org.apache.hadoop.hbase.TableName;
-051import 
org.apache.hadoop.hbase.backup.BackupAdmin;
-052import 
org.apache.hadoop.hbase.backup.BackupInfo;
-053import 
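The static-import churn above is the visible edge of a behavioral change: the delete command
picks up OPTION_KEEP and OPTION_LIST, and (per the index diff later in this digest)
BackupCommands.DeleteCommand now dispatches to executeDeleteOlderThan or
executeDeleteListOfBackups. A hedged sketch of that dispatch shape using commons-cli, which
the surrounding code already uses; the option strings here are illustrative stand-ins for
the BackupRestoreConstants values, which this diff does not show:

import org.apache.commons.cli.CommandLine;

final class DeleteDispatchSketch {
  static final String OPTION_KEEP = "keep"; // stand-in for BackupRestoreConstants.OPTION_KEEP
  static final String OPTION_LIST = "list"; // stand-in for BackupRestoreConstants.OPTION_LIST

  static void execute(CommandLine cmdline) {
    if (cmdline.hasOption(OPTION_KEEP)) {
      executeDeleteOlderThan(cmdline);     // prune backups older than N days
    } else if (cmdline.hasOption(OPTION_LIST)) {
      executeDeleteListOfBackups(cmdline); // delete an explicit list of backup ids
    }
    // else: the real command falls back to printing usage
  }

  private static void executeDeleteOlderThan(CommandLine cmdline) {
    // sketch only
  }

  private static void executeDeleteListOfBackups(CommandLine cmdline) {
    // sketch only
  }
}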

[06/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.html
index 83c17c0..9df0225 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.html

[02/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALKeyImpl.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALKeyImpl.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALKeyImpl.html
index a00f005..9bca473 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALKeyImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALKeyImpl.html
@@ -45,593 +45,571 @@

 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
-import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType;
-
-/**
- * Default implementation of Key for an Entry in the WAL.
- * For internal use only though Replication needs to have access.
- *
- * The log intermingles edits to many tables and rows, so each log entry
- * identifies the appropriate table and row.  Within a table and row, they're
- * also sorted.
- *
- * <p>Some Transactional edits (START, COMMIT, ABORT) will not have an associated row.
- */
-// TODO: Key and WALEdit are never used separately, or in one-to-many relation, for practical
-//       purposes. They need to be merged into WALEntry.
-@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.REPLICATION})
-public class WALKeyImpl implements WALKey {
-  public static final WALKeyImpl EMPTY_WALKEYIMPL = new WALKeyImpl();
-
-  public MultiVersionConcurrencyControl getMvcc() {
-    return mvcc;
-  }
-
-  /**
-   * Use it to complete mvcc transaction. This WALKeyImpl was part of
-   * (the transaction is started when you call append; see the comment on FSHLog#append). To
-   * complete call
-   * {@link MultiVersionConcurrencyControl#complete(MultiVersionConcurrencyControl.WriteEntry)}
-   * or {@link MultiVersionConcurrencyControl#complete(MultiVersionConcurrencyControl.WriteEntry)}
-   * @return A WriteEntry gotten from local WAL subsystem.
-   * @see #setWriteEntry(MultiVersionConcurrencyControl.WriteEntry)
-   */
-  public MultiVersionConcurrencyControl.WriteEntry getWriteEntry() {
-    return this.writeEntry;
-  }
-
-  public void setWriteEntry(MultiVersionConcurrencyControl.WriteEntry writeEntry) {
-    assert this.writeEntry == null;
-    this.writeEntry = writeEntry;
-    // Set our sequenceid now using WriteEntry.
-    this.sequenceId = writeEntry.getWriteNumber();
-  }
-
-  private byte[] encodedRegionName;
-
-  private TableName tablename;
-
-  /**
-   * SequenceId for this edit. Set post-construction at write-to-WAL time. Until then it is
-   * NO_SEQUENCE_ID. Change it so multiple threads can read it -- e.g. access is synchronized.
-   */
-  private long sequenceId;
-
-  /**
-   * Used during WAL replay; the sequenceId of the edit when it came into the system.
-   */
-  private long origLogSeqNum = 0;
-
-  /** Time at which this edit was written. */
-  private long writeTime;
-
-  /** The first element in the list is the cluster id on which the change has originated */
-  private List<UUID> clusterIds;
-
-  private NavigableMap<byte[], Integer> replicationScope;
-
-  private long nonceGroup = HConstants.NO_NONCE;
-  private long nonce = HConstants.NO_NONCE;
-  private MultiVersionConcurrencyControl mvcc;
-
-  /**
-   * Set in a way visible to multiple threads; e.g. synchronized getter/setters.
-   */
-  private MultiVersionConcurrencyControl.WriteEntry writeEntry;
-
-  private CompressionContext compressionContext;
-
-  public WALKeyImpl() {
-    init(null, null, 0L, HConstants.LATEST_TIMESTAMP,
-        new ArrayList<>(), HConstants.NO_NONCE, HConstants.NO_NONCE, null, null);
-  }
-
-  public WALKeyImpl(final NavigableMap<byte[], Integer> replicationScope) {
-    init(null, null, 0L, HConstants.LATEST_TIMESTAMP,
-        new ArrayList<>(), HConstants.NO_NONCE, HConstants.NO_NONCE, null, replicationScope);
-  }
-
-  @VisibleForTesting
-  public WALKeyImpl(final byte[] encodedRegionName, final TableName tablename, long logSeqNum,
-      final long now, UUID clusterId) {
-    List<UUID> clusterIds = new ArrayList<>(1);
-    clusterIds.add(clusterId);
-    init(encodedRegionName, tablename, logSeqNum, now, clusterIds,
-        HConstants.NO_NONCE, HConstants.NO_NONCE, null,
[49/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 5fe002a..008388e 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
 2007 - 2018 The Apache Software Foundation
 
   File: 3640,
- Errors: 16197,
+ Errors: 16190,
  Warnings: 0,
  Infos: 0
   
@@ -5403,7 +5403,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -25101,7 +25101,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -28825,7 +28825,7 @@ under the License.
   0
 
 
-  4
+  0
 
   
   
@@ -31513,7 +31513,7 @@ under the License.
   0
 
 
-  24
+  22
 
   
   
@@ -32983,7 +32983,7 @@ under the License.
   0
 
 
-  4
+  1
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/coc.html
--
diff --git a/coc.html b/coc.html
index cbead93..c90ab15 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Code of Conduct Policy
@@ -375,7 +375,7 @@ email to mailto:priv...@hbase.apache.org;>the priv
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-06-13
+  Last Published: 
2018-06-14
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index 7ec1cde..0577314 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependencies
 
@@ -440,7 +440,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-06-13
+  Last Published: 
2018-06-14
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index aefbf97..bc73b8a 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Reactor Dependency Convergence
 
@@ -1105,7 +1105,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-06-13
+  Last Published: 
2018-06-14
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index 1e06ed6..4f98113 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Dependency Information
 
@@ -313,7 +313,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-06-13
+  Last Published: 
2018-06-14
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index 0673694..028a76b 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependency Management
 
@@ -969,7 +969,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-06-13
+  Last Published: 
2018-06-14
 
   

[22/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
index 42d0637..eb16038 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
@@ -80,21 +80,21 @@
 072import 
org.apache.hadoop.hbase.PleaseHoldException;
 073import 
org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
 074import 
org.apache.hadoop.hbase.ScheduledChore;
-075import 
org.apache.hadoop.hbase.ServerMetricsBuilder;
-076import 
org.apache.hadoop.hbase.ServerName;
-077import 
org.apache.hadoop.hbase.TableDescriptors;
-078import 
org.apache.hadoop.hbase.TableName;
-079import 
org.apache.hadoop.hbase.TableNotDisabledException;
-080import 
org.apache.hadoop.hbase.TableNotFoundException;
-081import 
org.apache.hadoop.hbase.UnknownRegionException;
-082import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-083import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-084import 
org.apache.hadoop.hbase.client.MasterSwitchType;
-085import 
org.apache.hadoop.hbase.client.RegionInfo;
-086import 
org.apache.hadoop.hbase.client.Result;
-087import 
org.apache.hadoop.hbase.client.TableDescriptor;
-088import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-089import 
org.apache.hadoop.hbase.client.TableState;
+075import 
org.apache.hadoop.hbase.ServerName;
+076import 
org.apache.hadoop.hbase.TableDescriptors;
+077import 
org.apache.hadoop.hbase.TableName;
+078import 
org.apache.hadoop.hbase.TableNotDisabledException;
+079import 
org.apache.hadoop.hbase.TableNotFoundException;
+080import 
org.apache.hadoop.hbase.UnknownRegionException;
+081import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+082import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+083import 
org.apache.hadoop.hbase.client.MasterSwitchType;
+084import 
org.apache.hadoop.hbase.client.RegionInfo;
+085import 
org.apache.hadoop.hbase.client.Result;
+086import 
org.apache.hadoop.hbase.client.TableDescriptor;
+087import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+088import 
org.apache.hadoop.hbase.client.TableState;
+089import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
 090import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 091import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
 092import 
org.apache.hadoop.hbase.exceptions.MergeRegionException;
@@ -220,3477 +220,3481 @@

 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
-
-/**
- * HMaster is the "master server" for HBase. An HBase cluster has one active
- * master.  If many masters are started, all compete.  Whichever wins goes on to
- * run the cluster.  All others park themselves in their constructor until
- * master or cluster shutdown or until the active master loses its lease in
- * zookeeper.  Thereafter, all running masters jostle to take over the master role.
- *
- * <p>The Master can be asked to shut down the cluster. See {@link #shutdown()}.  In
- * this case it will tell all regionservers to go down and then wait on them
- * all reporting in that they are down.  This master will then shut itself down.
- *
- * <p>You can also shut down just this master.  Call {@link #stopMaster()}.
- *
- * @see org.apache.zookeeper.Watcher
- */
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-@SuppressWarnings("deprecation")
-public class HMaster extends HRegionServer implements MasterServices {
-  private static Logger LOG = LoggerFactory.getLogger(HMaster.class.getName());
-
-  /**
-   * Protection against zombie master. Started once Master accepts active responsibility and
-   * starts taking over responsibilities. Allows a finite time window before giving up ownership.
-   */
-  private static class InitializationMonitor extends HasThread {
-    /** The amount of time in milliseconds to sleep before checking initialization status. */
-    public static final String TIMEOUT_KEY = "hbase.master.initializationmonitor.timeout";
[25/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html
index f236300..513d2ad 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html

[21/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
index 42d0637..eb16038 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
[48/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index a794ddf..b54d12f 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -5362,7 +5362,7 @@
 
 baos
 - Variable in class org.apache.hadoop.hbase.ipc.CellBlockBuilder.ByteBufferOutputStreamSupplier
 
-BaosAndCompressor()
 - Constructor for class org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.BaosAndCompressor
+BaosAndCompressor(CompressionContext)
 - Constructor for class org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.BaosAndCompressor
 
 baosInMemory
 - Variable in class org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer
 
@@ -15665,9 +15665,11 @@
 
 CompoundConfiguration.ImmutableConfWrapper - 
Class in org.apache.hadoop.hbase
 
-compress(byte[],
 Dictionary) - Method in class 
org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.BaosAndCompressor
+compress(byte[],
 Enum) - Method in class org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.BaosAndCompressor
 
-compress(byte[],
 Dictionary) - Method in interface 
org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.ByteStringCompressor
+compress(byte[],
 Enum) - Method in interface 
org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.ByteStringCompressor
+
+compress(byte[],
 Enum) - Method in class org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.NoneCompressor
 
 compress
 - Variable in class org.apache.hadoop.hbase.rest.filter.GZIPResponseWrapper
 
@@ -15775,7 +15777,11 @@
 
 Compression context to use reading.
 
-compressionContext
 - Variable in class org.apache.hadoop.hbase.wal.WALKeyImpl
+compressionContext
 - Variable in class org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.BaosAndCompressor
+
+compressionContext
 - Variable in class org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.StatelessUncompressor
+
+CompressionContext.DictionaryIndex - Enum in org.apache.hadoop.hbase.regionserver.wal
 
 compressionDetails
 - Static variable in class org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2
 
@@ -17500,6 +17506,8 @@
 
 convertThrowableToIOE(Throwable,
 String) - Method in class org.apache.hadoop.hbase.regionserver.HRegionServer
 
+convertToBackupIds(ListBackupInfo)
 - Method in class org.apache.hadoop.hbase.backup.impl.BackupCommands.DeleteCommand
+
 convertToBigInteger(byte[])
 - Method in class org.apache.hadoop.hbase.util.RegionSplitter.NumberStringSplit
 
 Returns the BigInteger represented by the byte array
@@ -20576,7 +20584,8 @@
 
 createServerManager(MasterServices)
 - Method in class org.apache.hadoop.hbase.master.HMaster
 
-Create a ServerManager instance.
+
+ Create a ServerManager 
instance.
 
 createServerSocket(int)
 - Method in class org.apache.hadoop.hbase.SslRMIServerSocketFactorySecure
 
@@ -25283,12 +25292,16 @@
 
 dfsUtilClazz
 - Static variable in class org.apache.hadoop.hbase.util.FSHDFSUtils
 
+dictionaries
 - Variable in class org.apache.hadoop.hbase.regionserver.wal.CompressionContext
+
 Dictionary - Interface in org.apache.hadoop.hbase.io.util
 
 Dictionary interface
 
  Dictionary indexes should be either bytes or shorts, only positive.
 
+DictionaryIndex()
 - Constructor for enum org.apache.hadoop.hbase.regionserver.wal.CompressionContext.DictionaryIndex
+
 didTry
 - Variable in class org.apache.hadoop.hbase.client.FastFailInterceptorContext
 
 didTry()
 - Method in class org.apache.hadoop.hbase.client.FastFailInterceptorContext
@@ -29071,6 +29084,10 @@
 
 executed
 - Variable in class org.apache.hadoop.hbase.procedure2.SequentialProcedure
 
+executeDeleteListOfBackups(CommandLine)
 - Method in class org.apache.hadoop.hbase.backup.impl.BackupCommands.DeleteCommand
+
+executeDeleteOlderThan(CommandLine)
 - Method in class org.apache.hadoop.hbase.backup.impl.BackupCommands.DeleteCommand
+
 executedWriteBufferPeriodicFlushes
 - Variable in class org.apache.hadoop.hbase.client.BufferedMutatorImpl
 
 executeFromState(MasterProcedureEnv,
 MasterProcedureProtos.GCMergedRegionsState) - Method in class 
org.apache.hadoop.hbase.master.assignment.GCMergedRegionsProcedure
@@ -29173,6 +29190,8 @@
 
 executor
 - Variable in class org.apache.hadoop.hbase.master.balancer.RegionLocationFinder
 
+executor
 - Variable in class org.apache.hadoop.hbase.master.RegionServerTracker
+
 executor
 - Variable in class org.apache.hadoop.hbase.metrics.impl.GlobalMetricRegistriesAdapter
 
 executor
 - Variable in class org.apache.hadoop.hbase.procedure.flush.RegionServerFlushTableProcedureManager.FlushTableSubprocedurePool
@@ -30022,8 +30041,6 @@
 
 familyClose(SnapshotProtos.SnapshotRegionManifest.Builder,
 SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder) - Method 
in class org.apache.hadoop.hbase.snapshot.SnapshotManifestV2.ManifestBuilder
 
-familyDict
 - Variable in class org.apache.hadoop.hbase.regionserver.wal.CompressionContext
-
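The index entries above trace a refactor in CompressionContext: per-kind dictionary fields
(e.g. familyDict) give way to a dictionaries map keyed by a new DictionaryIndex enum, and
the ByteStringCompressor/Uncompressor interfaces take the enum instead of a raw Dictionary.
A hedged sketch of the enum-keyed layout (the enum constants here are illustrative; only
the names DictionaryIndex and dictionaries come from the diff):

import java.util.EnumMap;
import org.apache.hadoop.hbase.io.util.Dictionary;

final class CompressionContextSketch {
  enum DictionaryIndex { REGION, TABLE, FAMILY, QUALIFIER, ROW } // illustrative constants

  private final EnumMap<DictionaryIndex, Dictionary> dictionaries =
      new EnumMap<>(DictionaryIndex.class);

  Dictionary getDictionary(DictionaryIndex which) {
    // Callers now pass the enum instead of reaching for familyDict and friends.
    return dictionaries.get(which);
  }
}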
 

[14/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.BaosAndCompressor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.BaosAndCompressor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.BaosAndCompressor.html
index 83c17c0..9df0225 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.BaosAndCompressor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.BaosAndCompressor.html

[51/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/e11cf2cb
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/e11cf2cb
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/e11cf2cb

Branch: refs/heads/asf-site
Commit: e11cf2cbadbdd9ff7a2e97fe53b761752bb8452f
Parents: 436b0b1
Author: jenkins 
Authored: Thu Jun 14 22:21:56 2018 +
Committer: jenkins 
Committed: Thu Jun 14 22:21:56 2018 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 30234 -
 checkstyle.rss  |12 +-
 coc.html| 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html| 4 +
 devapidocs/allclasses-noframe.html  | 4 +
 devapidocs/constant-values.html |71 +-
 devapidocs/index-all.html   |   121 +-
 .../hadoop/hbase/backup/BackupDriver.html   |24 +-
 .../BackupRestoreConstants.BackupCommand.html   |44 +-
 .../hbase/backup/BackupRestoreConstants.html|   135 +-
 .../hbase/backup/class-use/BackupInfo.html  |13 +
 .../impl/BackupCommands.BackupSetCommand.html   |32 +-
 .../backup/impl/BackupCommands.Command.html |18 +-
 .../impl/BackupCommands.CreateCommand.html  |18 +-
 .../impl/BackupCommands.DeleteCommand.html  |65 +-
 .../impl/BackupCommands.DescribeCommand.html| 8 +-
 .../backup/impl/BackupCommands.HelpCommand.html | 8 +-
 .../impl/BackupCommands.HistoryCommand.html |18 +-
 .../impl/BackupCommands.MergeCommand.html   |12 +-
 .../impl/BackupCommands.ProgressCommand.html| 8 +-
 .../impl/BackupCommands.RepairCommand.html  |14 +-
 .../hbase/backup/impl/BackupCommands.html   |36 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../hadoop/hbase/class-use/ServerName.html  |54 +-
 .../hadoop/hbase/client/VersionInfoUtil.html| 4 +-
 .../hadoop/hbase/client/package-tree.html   |26 +-
 .../hadoop/hbase/filter/package-tree.html   | 8 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 8 +-
 .../hbase/io/util/class-use/Dictionary.html |49 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 2 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 2 +-
 .../master/HMaster.InitializationMonitor.html   |20 +-
 .../master/HMaster.MasterStoppedException.html  | 4 +-
 .../hbase/master/HMaster.RedirectServlet.html   |12 +-
 .../org/apache/hadoop/hbase/master/HMaster.html |   524 +-
 .../master/HMasterCommandLine.LocalHMaster.html | 4 +-
 .../hbase/master/RegionServerTracker.html   |   190 +-
 .../hadoop/hbase/master/ServerManager.html  |   245 +-
 .../hbase/master/balancer/package-tree.html | 2 +-
 .../hbase/master/class-use/MasterServices.html  |13 +-
 .../master/class-use/RegionServerTracker.html   | 2 +-
 .../hbase/master/class-use/ServerManager.html   | 5 +-
 .../hadoop/hbase/master/package-summary.html| 3 +-
 .../hadoop/hbase/master/package-tree.html   | 6 +-
 .../apache/hadoop/hbase/master/package-use.html | 3 +-
 .../hbase/master/procedure/package-tree.html| 2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |16 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 4 +-
 .../class-use/SpaceQuotaSnapshotNotifier.html   | 2 +-
 .../hadoop/hbase/quotas/package-tree.html   | 8 +-
 .../hadoop/hbase/regionserver/package-tree.html |20 +-
 .../regionserver/querymatcher/package-tree.html | 4 +-
 .../wal/AbstractProtobufLogWriter.html  |20 +-
 .../wal/AsyncProtobufLogWriter.html |16 +-
 .../wal/CompressionContext.DictionaryIndex.html |   382 +
 .../regionserver/wal/CompressionContext.html|   118 +-
 .../hbase/regionserver/wal/Compressor.html  | 4 +-
 .../hbase/regionserver/wal/FSWALEntry.html  | 2 +-
 .../regionserver/wal/ProtobufLogReader.html |10 +-
 .../regionserver/wal/ProtobufLogWriter.html |14 +-
 .../hbase/regionserver/wal/ReaderBase.html  |16 +-
 .../regionserver/wal/SecureWALCellCodec.html| 6 +-
 .../wal/WALCellCodec.BaosAndCompressor.html |68 +-
 .../wal/WALCellCodec.ByteStringCompressor.html  |14 +-
 

[44/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
index fc94836..c6822d1 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
@@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class HMaster
+public class HMaster
 extends HRegionServer
 implements MasterServices
 HMaster is the "master server" for HBase. An HBase cluster has one active master.
@@ -406,7 +406,7 @@ implements regionNormalizerTracker
 
 
-(package private) RegionServerTracker
+private RegionServerTracker
 regionServerTracker
 
 
@@ -649,7 +649,7 @@ implements 
-(package private) SpaceQuotaSnapshotNotifier
+private SpaceQuotaSnapshotNotifier
 createQuotaSnapshotNotifier()
 
 
@@ -657,9 +657,10 @@ implements createRpcServices()
 
 
-(package private) ServerManager
+protected ServerManager
 createServerManager(MasterServices master)
-Create a ServerManager instance.
+Create a ServerManager instance.
 
 
 
@@ -1069,13 +1070,14 @@ implements 
-(package private) void
+protected void
 initClusterSchemaService()
 
 
-(package private) void
+protected void
 initializeZKBasedSystemTrackers()
-Initialize all ZK based system trackers.
+Initialize all ZK based system trackers.
 
 
 
@@ -1083,7 +1085,7 @@ implements initMobCleaner()
 
 
-(package private) void
+private void
 initQuotaManager()
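The hunks above widen HMaster's factory and initialization hooks (createServerManager, initClusterSchemaService, initializeZKBasedSystemTrackers) from package-private to protected, and narrow purely internal ones to private. A minimal sketch of what the protected hooks enable, assuming the branch-2 signatures shown here; the subclass is hypothetical and the constructor's exception list is an assumption:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.ServerManager;
import org.apache.zookeeper.KeeperException;

// Hypothetical subclass; only possible now that createServerManager is protected.
public class CustomHMaster extends HMaster {
  public CustomHMaster(Configuration conf) throws IOException, KeeperException {
    super(conf);
  }

  @Override
  protected ServerManager createServerManager(MasterServices master) throws IOException {
    // Decorate or replace the default ServerManager before delegating.
    return super.createServerManager(master);
  }
}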
 
 
@@ -1524,7 +1526,7 @@

 regionServerTracker
-RegionServerTracker regionServerTracker
+private RegionServerTracker regionServerTracker


[29/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
index f236300..513d2ad 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
@@ -26,1048 +26,1115 @@
 018
 019package org.apache.hadoop.hbase.backup.impl;
 020
-021import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
-022import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-024import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
-025import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-026import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-027import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-028import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-029import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-030import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-031import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-032import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-033import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-034import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-035import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-036import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-037import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-038import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-039
-040import java.io.IOException;
-041import java.net.URI;
-042import java.util.List;
+021import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BACKUP_LIST_DESC;
+022import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
+023import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
+024import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+025import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+026import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP;
+027import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP_DESC;
+028import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_LIST;
+029import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+030import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+031import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+032import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+033import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+034import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+035import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+036import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+037import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+038import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+039import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+040import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+041import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+042import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 043
-044import org.apache.commons.lang3.StringUtils;
-045import org.apache.hadoop.conf.Configuration;
-046import org.apache.hadoop.conf.Configured;
-047import org.apache.hadoop.fs.FileSystem;
-048import org.apache.hadoop.fs.Path;
-049import org.apache.hadoop.hbase.HBaseConfiguration;
-050import org.apache.hadoop.hbase.TableName;
-051import org.apache.hadoop.hbase.backup.BackupAdmin;
-052import org.apache.hadoop.hbase.backup.BackupInfo;
-053import 
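The rewritten import block pulls in OPTION_KEEP, OPTION_LIST and OPTION_BACKUP_LIST_DESC, which points at new backup command-line options. Their string values are not visible in this diff, so the hedged sketch below only prints them rather than assuming what they are:

import org.apache.hadoop.hbase.backup.BackupRestoreConstants;

public class PrintNewBackupOptions {
  public static void main(String[] args) {
    // Constants taken from the new static imports in the hunk above; their
    // values are not shown in the diff, so we just print them.
    System.out.println(BackupRestoreConstants.OPTION_KEEP);
    System.out.println(BackupRestoreConstants.OPTION_LIST);
  }
}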

[33/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
index f236300..513d2ad 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
@@ -26,1048 +26,1115 @@
 018
 019package org.apache.hadoop.hbase.backup.impl;
 020
-021import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
-022import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-024import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
-025import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-026import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-027import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-028import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-029import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-030import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-031import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-032import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-033import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-034import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-035import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-036import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-037import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-038import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-039
-040import java.io.IOException;
-041import java.net.URI;
-042import java.util.List;
+021import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BACKUP_LIST_DESC;
+022import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
+023import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
+024import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+025import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+026import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP;
+027import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP_DESC;
+028import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_LIST;
+029import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+030import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+031import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+032import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+033import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+034import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+035import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+036import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+037import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+038import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+039import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+040import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+041import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+042import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 043
-044import org.apache.commons.lang3.StringUtils;
-045import org.apache.hadoop.conf.Configuration;
-046import org.apache.hadoop.conf.Configured;
-047import org.apache.hadoop.fs.FileSystem;
-048import org.apache.hadoop.fs.Path;
-049import org.apache.hadoop.hbase.HBaseConfiguration;
-050import org.apache.hadoop.hbase.TableName;
-051import org.apache.hadoop.hbase.backup.BackupAdmin;
-052import org.apache.hadoop.hbase.backup.BackupInfo;
-053import 

[42/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/master/ServerManager.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/ServerManager.html 
b/devapidocs/org/apache/hadoop/hbase/master/ServerManager.html
index ed8e9dd..2d0d7ff 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/ServerManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/ServerManager.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class ServerManager
+public class ServerManager
 extends Object
 The ServerManager class manages info about region servers.
  
@@ -267,10 +267,6 @@
 Constructor.

-ServerManager(MasterServices master,
-              boolean connect)
 
 
 
@@ -381,104 +377,108 @@
 getDrainingServersList()

+int
+getInfoPort(ServerName serverName)
 org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds
 getLastFlushedSequenceId(byte[] encodedRegionName)

 ServerMetrics
 getLoad(ServerName serverName)

 private int
 getMinToStart()
 Calculate min necessary to start.

 Map<ServerName,ServerMetrics>
 getOnlineServers()

 List<ServerName>
 getOnlineServersList()

 List<ServerName>
 getOnlineServersListWithPredicator(List<ServerName> keys,
                                    Predicate<ServerMetrics> idleServerPredicator)

 private List<String>
 getRegionServersInZK(ZKWatcher zkw)

 org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface
 getRsAdmin(ServerName sn)

 int
 getServerVersion(ServerName serverName)
 May return 0 when server is not online.

 private String
 getStrForMax(int max)

 (package private) boolean
 isClusterShutdown()

 boolean
 isRegionInServerManagerStates(RegionInfo hri)

 boolean
 isServerDead(ServerName serverName)
 Check if a server is known to be dead.

 boolean
 isServerOnline(ServerName serverName)

 (package private) void
 letRegionServersShutdown()

 void
 moveFromOnlineToDeadServers(ServerName sn)

 private HBaseRpcController
 newRpcController()

 void
 processDeadServer(ServerName serverName,
                   boolean shouldSplitWal)

 (package private) void
 processQueuedDeadServers()
 Process the servers which died during master's initialization.

 (package private) void
 recordNewServerWithLock(ServerName serverName,
                         ServerMetrics sl)
 Adds the onlineServers list.
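Beyond the renumbering, the visible API change in this page is the new public getInfoPort(ServerName) accessor and the removal of the (MasterServices, boolean) constructor. A hedged usage sketch; the wrapper class and method are illustrative, only getInfoPort(ServerName) comes from the diff:

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.ServerManager;

final class InfoPortLookup {
  // Returns the info-server (web UI) port the region server reported;
  // the exact semantics are an assumption based on the method name.
  static int infoPortOf(ServerManager serverManager, ServerName serverName) {
    return serverManager.getInfoPort(serverName);
  }
}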

[47/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
 
b/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
index 3347de0..0b774f2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static enum BackupRestoreConstants.BackupCommand
+public static enum BackupRestoreConstants.BackupCommand
 extends Enum<BackupRestoreConstants.BackupCommand>
 
 
 Enum Constant Detail:
 public static final BackupRestoreConstants.BackupCommand
   CREATE, CANCEL, DELETE, DESCRIBE, HISTORY, STATUS, CONVERT, MERGE, STOP, SHOW,
   HELP, PROGRESS, SET, SET_ADD, SET_REMOVE, SET_DELETE, SET_DESCRIBE, SET_LIST, REPAIR

@@ -440,7 +440,7 @@ the order they are declared.
 
 
 values
 public static BackupRestoreConstants.BackupCommand[] values()
 Returns an array containing the constants of this enum type, in the order they are declared.
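values() follows the standard enum contract quoted above; a small sketch that walks the constants in declaration order:

import org.apache.hadoop.hbase.backup.BackupRestoreConstants;

public class ListBackupCommands {
  public static void main(String[] args) {
    // Prints CREATE first and REPAIR last, per the declaration order above.
    for (BackupRestoreConstants.BackupCommand cmd : BackupRestoreConstants.BackupCommand.values()) {
      System.out.println(cmd.name());
    }
  }
}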

[38/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/CompressionContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/CompressionContext.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/CompressionContext.html
index 542438d..f837340 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/CompressionContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/CompressionContext.html
@@ -86,10 +86,6 @@
 org.apache.hadoop.hbase.regionserver.wal
 
 
-
-org.apache.hadoop.hbase.wal
-
-
 
 
 
@@ -119,6 +115,14 @@
 WALCellCodec.CompressedKvDecoder.compression
 
 
+(package private) CompressionContext
+WALCellCodec.StatelessUncompressor.compressionContext
+
+
+private CompressionContext
+WALCellCodec.BaosAndCompressor.compressionContext
+
+
 protected CompressionContext
 ReaderBase.compressionContext
 Compression context to use reading.
@@ -174,17 +178,23 @@
 
 
 
+BaosAndCompressor(CompressionContext compressionContext)
+
 CompressedKvDecoder(InputStream in,
                     CompressionContext compression)

 CompressedKvEncoder(OutputStream out,
                     CompressionContext compression)

 SecureWALCellCodec(org.apache.hadoop.conf.Configuration conf,
                    CompressionContext compression)

+StatelessUncompressor(CompressionContext compressionContext)
+
 WALCellCodec(org.apache.hadoop.conf.Configuration conf,
              CompressionContext compression)
 CompressionContextcompression)
@@ -195,43 +205,6 @@
 
 
 
-Uses of CompressionContext in org.apache.hadoop.hbase.wal
-
-Fields in org.apache.hadoop.hbase.wal declared as CompressionContext
-
-private CompressionContext
-WALKeyImpl.compressionContext
-
-Methods in org.apache.hadoop.hbase.wal with parameters of type CompressionContext
-
-void
-WALKeyImpl.setCompressionContext(CompressionContext compressionContext)
-
-void
-WAL.Entry.setCompressionContext(CompressionContext compressionContext)
-Set compression context for this entry.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALCellCodec.ByteStringCompressor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALCellCodec.ByteStringCompressor.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALCellCodec.ByteStringCompressor.html
index fbd2cf4..d48d069 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALCellCodec.ByteStringCompressor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALCellCodec.ByteStringCompressor.html
@@ -110,6 +110,10 @@
 (package private) static class
 WALCellCodec.BaosAndCompressor
 
+
+(package private) static class
+WALCellCodec.NoneCompressor
+
 
 
 
@@ -136,6 +140,10 @@
 WALCellCodec.ByteStringCompressor
 WALCellCodec.getByteStringCompressor()
 
+
+static WALCellCodec.ByteStringCompressor
+WALCellCodec.getNoneCompressor()
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALCellCodec.ByteStringUncompressor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALCellCodec.ByteStringUncompressor.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALCellCodec.ByteStringUncompressor.html
index a8c113e..6534731 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALCellCodec.ByteStringUncompressor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/class-use/WALCellCodec.ByteStringUncompressor.html
@@ -99,6 +99,23 @@
 
 
 Uses of WALCellCodec.ByteStringUncompressor
 in org.apache.hadoop.hbase.regionserver.wal
+
+Classes in org.apache.hadoop.hbase.regionserver.wal
 that implement WALCellCodec.ByteStringUncompressor
+
+Modifier and Type
+Class and Description
+
+
+
+(package private) static class
+WALCellCodec.NoneUncompressor
+
+
+(package private) static class
+WALCellCodec.StatelessUncompressor
+
+
+
 
 Fields in org.apache.hadoop.hbase.regionserver.wal
 declared as WALCellCodec.ByteStringUncompressor
 
@@ -110,10 +127,6 @@
 protected WALCellCodec.ByteStringUncompressor
 ProtobufLogReader.byteStringUncompressor
 
-
-protected WALCellCodec.ByteStringUncompressor
-WALCellCodec.statelessUncompressor
-
 
 
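These use-pages show WALCellCodec replacing its nullable statelessUncompressor field with explicit NoneCompressor/NoneUncompressor null objects and a static getNoneCompressor() factory. A hedged sketch of the selection pattern this implies; codec and doCompress are assumed to be in scope, and only getNoneCompressor() and getByteStringCompressor() are taken from the pages above:

import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;

final class CompressorSelection {
  // Callers always receive a compressor object; a no-op one when WAL
  // compression is disabled (null-object pattern).
  static WALCellCodec.ByteStringCompressor pick(WALCellCodec codec, boolean doCompress) {
    return doCompress ? codec.getByteStringCompressor() : WALCellCodec.getNoneCompressor();
  }
}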

[40/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/regionserver/wal/CompressionContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/CompressionContext.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/CompressionContext.html
index 6cf7da5..25c20c3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/CompressionContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/CompressionContext.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value={"Coprocesssor","Phoenix"})
-public class CompressionContext
+public class CompressionContext
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Context that holds the various dictionaries for compression 
in WAL.
 
@@ -119,6 +119,25 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 
+
+
+
+
+
+Nested Class Summary
+
+Nested Classes
+
+Modifier and Type
+Class and Description
+
+
+static class
+CompressionContext.DictionaryIndex
+
+
+
+
 
 
 
@@ -132,28 +151,12 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Field and Description
 
 
-(package private) static String
-ENABLE_WAL_TAGS_COMPRESSION
+private Map<CompressionContext.DictionaryIndex,Dictionary>
+dictionaries
 
 
-Dictionary
-familyDict
-
-
-(package private) Dictionary
-qualifierDict
-
-
-Dictionary
-regionDict
-
-
-(package private) Dictionary
-rowDict
-
-
-Dictionary
-tableDict
+(package private) static String
+ENABLE_WAL_TAGS_COMPRESSION
 
 
 (package private) TagCompressionContext
@@ -197,6 +200,10 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 (package private) void
 clear()
 
+
+Dictionary
+getDictionary(Enum dictIndex)
+
 
 
 
@@ -225,56 +232,20 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 ENABLE_WAL_TAGS_COMPRESSION
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String ENABLE_WAL_TAGS_COMPRESSION
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String ENABLE_WAL_TAGS_COMPRESSION
 
 See Also:
 Constant
 Field Values
 
 
 
-
-
-
-
-
-regionDict
-public final Dictionary regionDict
-
-
-
+
 
 
 
 
-tableDict
-public final Dictionary tableDict
-
-
-
-
-
-
-
-familyDict
-public final Dictionary familyDict
-
-
-
-
-
-
-
-qualifierDict
-finalDictionary qualifierDict
-
-
-
-
-
-
-
-rowDict
-final Dictionary rowDict
+dictionaries
+private final Map<CompressionContext.DictionaryIndex,Dictionary> dictionaries
 
 
 
@@ -283,7 +254,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 tagCompressionContext
 TagCompressionContext tagCompressionContext
 
 
 
@@ -300,7 +271,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 CompressionContext
 public CompressionContext(Class<? extends Dictionary> dictType,
                           boolean recoveredEdits,
                           boolean hasTagCompression)
                    throws SecurityException,
@@ -326,13 +297,22 @@ extends 
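The net effect of this page: the five public dictionary fields give way to a private map keyed by CompressionContext.DictionaryIndex, read through getDictionary(...). A hedged sketch; the constant name TABLE is inferred from the removed tableDict field and is not shown in the diff:

import org.apache.hadoop.hbase.io.util.Dictionary;
import org.apache.hadoop.hbase.regionserver.wal.CompressionContext;

final class WalDictionaries {
  // ctx is assumed to be an initialized CompressionContext; the
  // DictionaryIndex constant name is an assumption.
  static Dictionary tableDictionary(CompressionContext ctx) {
    return ctx.getDictionary(CompressionContext.DictionaryIndex.TABLE);
  }
}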

[45/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index a7ea47e..63c7ead 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -552,24 +552,24 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.client.AbstractResponse.ResponseType
-org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState
-org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.Retry
-org.apache.hadoop.hbase.client.Scan.ReadType
-org.apache.hadoop.hbase.client.SnapshotType
 org.apache.hadoop.hbase.client.RegionLocateType
-org.apache.hadoop.hbase.client.Durability
-org.apache.hadoop.hbase.client.CompactType
-org.apache.hadoop.hbase.client.ScannerCallable.MoreResults
-org.apache.hadoop.hbase.client.TableState.State
 org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState
+org.apache.hadoop.hbase.client.Scan.ReadType
+org.apache.hadoop.hbase.client.MasterSwitchType
+org.apache.hadoop.hbase.client.ScannerCallable.MoreResults
+org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState
 org.apache.hadoop.hbase.client.MobCompactPartitionPolicy
-org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows
-org.apache.hadoop.hbase.client.RequestController.ReturnCode
-org.apache.hadoop.hbase.client.CompactionState
+org.apache.hadoop.hbase.client.AbstractResponse.ResponseType
 org.apache.hadoop.hbase.client.Consistency
+org.apache.hadoop.hbase.client.SnapshotType
+org.apache.hadoop.hbase.client.RequestController.ReturnCode
 org.apache.hadoop.hbase.client.IsolationLevel
-org.apache.hadoop.hbase.client.MasterSwitchType
+org.apache.hadoop.hbase.client.CompactionState
+org.apache.hadoop.hbase.client.CompactType
+org.apache.hadoop.hbase.client.Durability
+org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.Retry
+org.apache.hadoop.hbase.client.TableState.State
+org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 3027730..27df980 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -183,14 +183,14 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
+org.apache.hadoop.hbase.filter.FilterList.Operator
+org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
+org.apache.hadoop.hbase.filter.FilterWrapper.FilterRowRetCode
 org.apache.hadoop.hbase.filter.FuzzyRowFilter.Order
-org.apache.hadoop.hbase.filter.Filter.ReturnCode
 org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
-org.apache.hadoop.hbase.filter.FilterList.Operator
 org.apache.hadoop.hbase.filter.FuzzyRowFilter.SatisfiesCode
-org.apache.hadoop.hbase.filter.FilterWrapper.FilterRowRetCode
 org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
-org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
+org.apache.hadoop.hbase.filter.Filter.ReturnCode
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index 7a15c5d..16dff46 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -273,12 +273,12 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)

[17/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.html
index 5dbbaf4..c771708 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.html
@@ -191,71 +191,74 @@
 183this.cellEncoder = 
codec.getEncoder(getOutputStreamForCellEncoder());
 184if (doCompress) {
 185  this.compressor = 
codec.getByteStringCompressor();
-186}
-187  }
-188
-189  protected void initAfterHeader(boolean 
doCompress) throws IOException {
-190initAfterHeader0(doCompress);
-191  }
-192
-193  // should be called in sub classes's 
initAfterHeader method to init SecureWALCellCodec.
-194  protected final void 
secureInitAfterHeader(boolean doCompress, Encryptor encryptor)
-195  throws IOException {
-196    if (conf.getBoolean(HConstants.ENABLE_WAL_ENCRYPTION, false) && encryptor != null) {
-197  WALCellCodec codec = 
SecureWALCellCodec.getCodec(this.conf, encryptor);
-198  this.cellEncoder = 
codec.getEncoder(getOutputStreamForCellEncoder());
-199  // We do not support compression
-200  this.compressionContext = null;
-201} else {
-202  initAfterHeader0(doCompress);
-203}
-204  }
-205
-206  void setWALTrailer(WALTrailer 
walTrailer) {
-207this.trailer = walTrailer;
-208  }
-209
-210  public long getLength() {
-211return length.get();
-212  }
-213
-214  private WALTrailer 
buildWALTrailer(WALTrailer.Builder builder) {
-215return builder.build();
-216  }
-217
-218  protected void writeWALTrailer() {
-219try {
-220  int trailerSize = 0;
-221  if (this.trailer == null) {
-222// use default trailer.
-223LOG.warn("WALTrailer is null. 
Continuing with default.");
-224this.trailer = 
buildWALTrailer(WALTrailer.newBuilder());
-225trailerSize = 
this.trailer.getSerializedSize();
-226      } else if ((trailerSize = this.trailer.getSerializedSize()) > this.trailerWarnSize) {
-227        // continue writing after warning the user.
-228        LOG.warn("Please investigate WALTrailer usage. Trailer size > maximum size : " + trailerSize
-229            + " > " + this.trailerWarnSize);
-230  }
-231  
length.set(writeWALTrailerAndMagic(trailer, 
ProtobufLogReader.PB_WAL_COMPLETE_MAGIC));
-232  this.trailerWritten = true;
-233} catch (IOException ioe) {
-234  LOG.warn("Failed to write trailer, 
non-fatal, continuing...", ioe);
-235}
-236  }
-237
-238  protected abstract void 
initOutput(FileSystem fs, Path path, boolean overwritable, int bufferSize,
-239  short replication, long blockSize) 
throws IOException, StreamLacksCapabilityException;
+186} else {
+187  this.compressor = 
WALCellCodec.getNoneCompressor();
+188}
+189  }
+190
+191  protected void initAfterHeader(boolean 
doCompress) throws IOException {
+192initAfterHeader0(doCompress);
+193  }
+194
+195  // should be called in sub classes's 
initAfterHeader method to init SecureWALCellCodec.
+196  protected final void 
secureInitAfterHeader(boolean doCompress, Encryptor encryptor)
+197  throws IOException {
+198    if (conf.getBoolean(HConstants.ENABLE_WAL_ENCRYPTION, false) && encryptor != null) {
+199  WALCellCodec codec = 
SecureWALCellCodec.getCodec(this.conf, encryptor);
+200  this.cellEncoder = 
codec.getEncoder(getOutputStreamForCellEncoder());
+201  // We do not support compression
+202  this.compressionContext = null;
+203  this.compressor = 
WALCellCodec.getNoneCompressor();
+204} else {
+205  initAfterHeader0(doCompress);
+206}
+207  }
+208
+209  void setWALTrailer(WALTrailer 
walTrailer) {
+210this.trailer = walTrailer;
+211  }
+212
+213  public long getLength() {
+214return length.get();
+215  }
+216
+217  private WALTrailer 
buildWALTrailer(WALTrailer.Builder builder) {
+218return builder.build();
+219  }
+220
+221  protected void writeWALTrailer() {
+222try {
+223  int trailerSize = 0;
+224  if (this.trailer == null) {
+225// use default trailer.
+226LOG.warn("WALTrailer is null. 
Continuing with default.");
+227this.trailer = 
buildWALTrailer(WALTrailer.newBuilder());
+228trailerSize = 
this.trailer.getSerializedSize();
+229      } else if ((trailerSize = this.trailer.getSerializedSize()) > this.trailerWarnSize) {
+230        // continue writing after warning the user.
+231        LOG.warn("Please investigate WALTrailer usage. Trailer size > maximum size : " + trailerSize
+232            + " > " + this.trailerWarnSize);
+233  }
+234 

[08/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.NoneCompressor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.NoneCompressor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.NoneCompressor.html
new file mode 100644
index 000..9df0225
--- /dev/null
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.NoneCompressor.html
@@ -0,0 +1,476 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/*
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package 
org.apache.hadoop.hbase.regionserver.wal;
+019
+020import java.io.ByteArrayOutputStream;
+021import java.io.IOException;
+022import java.io.InputStream;
+023import java.io.OutputStream;
+024
+025import 
org.apache.hadoop.conf.Configuration;
+026import org.apache.hadoop.hbase.Cell;
+027import 
org.apache.hadoop.hbase.CellUtil;
+028import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
+029import 
org.apache.hadoop.hbase.PrivateCellUtil;
+030import 
org.apache.hadoop.hbase.KeyValue;
+031import 
org.apache.hadoop.hbase.KeyValueUtil;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033import 
org.apache.hadoop.hbase.codec.BaseDecoder;
+034import 
org.apache.hadoop.hbase.codec.BaseEncoder;
+035import 
org.apache.hadoop.hbase.codec.Codec;
+036import 
org.apache.hadoop.hbase.codec.KeyValueCodecWithTags;
+037import 
org.apache.hadoop.hbase.io.ByteBuffInputStream;
+038import 
org.apache.hadoop.hbase.io.ByteBufferWriter;
+039import 
org.apache.hadoop.hbase.io.ByteBufferWriterOutputStream;
+040import 
org.apache.hadoop.hbase.io.util.Dictionary;
+041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+042import 
org.apache.hadoop.hbase.nio.ByteBuff;
+043import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+044import 
org.apache.hadoop.hbase.util.Bytes;
+045import 
org.apache.hadoop.hbase.util.ReflectionUtils;
+046import org.apache.hadoop.io.IOUtils;
+047
+048import 
org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
+049import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+050
+051
+052/**
+053 * Compression in this class is lifted 
off Compressor/KeyValueCompression.
+054 * This is a pure coincidence... they are 
independent and don't have to be compatible.
+055 *
+056 * This codec is used at server side for 
writing cells to WAL as well as for sending edits
+057 * as part of the distributed splitting 
process.
+058 */
+059@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
+060  HBaseInterfaceAudience.PHOENIX, 
HBaseInterfaceAudience.CONFIG})
+061public class WALCellCodec implements 
Codec {
+062  /** Configuration key for the class to 
use when encoding cells in the WAL */
+063  public static final String 
WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
+064
+065  protected final CompressionContext 
compression;
+066
+067  /**
+068   * <b>All subclasses must implement a no argument constructor</b>
+069   */
+070  public WALCellCodec() {
+071this.compression = null;
+072  }
+073
+074  /**
+075   * Default constructor - <b>all subclasses must implement a constructor with this signature</b>
+076   * if they are to be dynamically loaded from the {@link Configuration}.
+077   * @param conf configuration to configure <tt>this</tt>
+078   * @param compression compression the codec should support, can be <tt>null</tt> to indicate no
+079   *  compression
+080   */
+081  public WALCellCodec(Configuration conf, 
CompressionContext compression) {
+082this.compression = compression;
+083  }
+084
+085  public static String 
getWALCellCodecClass(Configuration conf) {
+086return 
conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
+087  }
+088
+089  /**
+090   * Create and setup a {@link 
WALCellCodec} from the {@code cellCodecClsName} and
+091   * CompressionContext, if {@code 
cellCodecClsName} is specified.
+092   * Otherwise Cell Codec classname is 
read from {@link Configuration}.
+093 
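The excerpt documents a pluggable codec: hbase.regionserver.wal.codec names the class, which per the javadoc above must expose a (Configuration, CompressionContext) constructor to be loadable. A hedged sketch of setting and resolving it; the custom codec class name is hypothetical:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;

public class WalCodecConfig {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // org.example.MyWALCellCodec is a placeholder for a real subclass.
    conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, "org.example.MyWALCellCodec");
    // Resolves to the configured class name, or WALCellCodec itself by default.
    System.out.println(WALCellCodec.getWALCellCodecClass(conf));
  }
}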

[23/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
index 42d0637..eb16038 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
@@ -80,21 +80,21 @@
 072import org.apache.hadoop.hbase.PleaseHoldException;
 073import org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
 074import org.apache.hadoop.hbase.ScheduledChore;
-075import org.apache.hadoop.hbase.ServerMetricsBuilder;
-076import org.apache.hadoop.hbase.ServerName;
-077import org.apache.hadoop.hbase.TableDescriptors;
-078import org.apache.hadoop.hbase.TableName;
-079import org.apache.hadoop.hbase.TableNotDisabledException;
-080import org.apache.hadoop.hbase.TableNotFoundException;
-081import org.apache.hadoop.hbase.UnknownRegionException;
-082import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-083import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-084import org.apache.hadoop.hbase.client.MasterSwitchType;
-085import org.apache.hadoop.hbase.client.RegionInfo;
-086import org.apache.hadoop.hbase.client.Result;
-087import org.apache.hadoop.hbase.client.TableDescriptor;
-088import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-089import org.apache.hadoop.hbase.client.TableState;
+075import org.apache.hadoop.hbase.ServerName;
+076import org.apache.hadoop.hbase.TableDescriptors;
+077import org.apache.hadoop.hbase.TableName;
+078import org.apache.hadoop.hbase.TableNotDisabledException;
+079import org.apache.hadoop.hbase.TableNotFoundException;
+080import org.apache.hadoop.hbase.UnknownRegionException;
+081import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+082import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+083import org.apache.hadoop.hbase.client.MasterSwitchType;
+084import org.apache.hadoop.hbase.client.RegionInfo;
+085import org.apache.hadoop.hbase.client.Result;
+086import org.apache.hadoop.hbase.client.TableDescriptor;
+087import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+088import org.apache.hadoop.hbase.client.TableState;
+089import org.apache.hadoop.hbase.client.VersionInfoUtil;
 090import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 091import org.apache.hadoop.hbase.exceptions.DeserializationException;
 092import org.apache.hadoop.hbase.exceptions.MergeRegionException;
@@ -220,3477 +220,3481 @@
 212
 213import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 214import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
-215import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo;
-216import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas;
-217import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy;
-218import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-219import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
-220
-221/**
-222 * HMaster is the "master server" for HBase. An HBase cluster has one active
-223 * master.  If many masters are started, all compete.  Whichever wins goes on to
-224 * run the cluster.  All others park themselves in their constructor until
-225 * master or cluster shutdown or until the active master loses its lease in
-226 * zookeeper.  Thereafter, all running master jostle to take over master role.
-227 *
-228 * <p>The Master can be asked shutdown the cluster. See {@link #shutdown()}.  In
-229 * this case it will tell all regionservers to go down and then wait on them
-230 * all reporting in that they are down.  This master will then shut itself down.
-231 *
-232 * <p>You can also shutdown just this master.  Call {@link #stopMaster()}.
-233 *
-234 * @see org.apache.zookeeper.Watcher
-235 */
-236@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-237@SuppressWarnings("deprecation")
-238public class HMaster extends HRegionServer implements MasterServices {
-239  private static Logger LOG = LoggerFactory.getLogger(HMaster.class.getName());
-240
-241  /**
-242   * Protection against zombie master. Started once Master accepts active responsibility and
-243   * starts taking over responsibilities. Allows a finite time window before giving up ownership.
-244   */
-245  private static class InitializationMonitor extends HasThread {
-246    /** The amount of time in milliseconds to sleep before checking initialization status. */
-247    public static final String TIMEOUT_KEY = 

[34/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
index f236300..513d2ad 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
@@ -26,1048 +26,1115 @@
 018
 019package org.apache.hadoop.hbase.backup.impl;
 020
-021import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
-022import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-024import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
-025import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-026import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-027import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-028import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-029import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-030import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-031import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-032import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-033import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-034import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-035import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-036import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-037import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-038import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-039
-040import java.io.IOException;
-041import java.net.URI;
-042import java.util.List;
+021import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BACKUP_LIST_DESC;
+022import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
+023import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
+024import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+025import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+026import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP;
+027import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP_DESC;
+028import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_LIST;
+029import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+030import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+031import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+032import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+033import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+034import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+035import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+036import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+037import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+038import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+039import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+040import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+041import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+042import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 043
-044import org.apache.commons.lang3.StringUtils;
-045import org.apache.hadoop.conf.Configuration;
-046import org.apache.hadoop.conf.Configured;
-047import org.apache.hadoop.fs.FileSystem;
-048import org.apache.hadoop.fs.Path;
-049import 
org.apache.hadoop.hbase.HBaseConfiguration;
-050import 
org.apache.hadoop.hbase.TableName;
-051import 
org.apache.hadoop.hbase.backup.BackupAdmin;
-052import 
org.apache.hadoop.hbase.backup.BackupInfo;
-053import 

[13/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringCompressor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringCompressor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringCompressor.html
index 83c17c0..9df0225 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringCompressor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringCompressor.html
@@ -54,323 +54,362 @@
 046import org.apache.hadoop.io.IOUtils;
 047
 048import 
org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
-049
+049import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 050
-051/**
-052 * Compression in this class is lifted 
off Compressor/KeyValueCompression.
-053 * This is a pure coincidence... they are 
independent and don't have to be compatible.
-054 *
-055 * This codec is used at server side for 
writing cells to WAL as well as for sending edits
-056 * as part of the distributed splitting 
process.
-057 */
-058@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
-059  HBaseInterfaceAudience.PHOENIX, 
HBaseInterfaceAudience.CONFIG})
-060public class WALCellCodec implements 
Codec {
-061  /** Configuration key for the class to 
use when encoding cells in the WAL */
-062  public static final String 
WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
-063
-064  protected final CompressionContext 
compression;
-065  protected final ByteStringUncompressor 
statelessUncompressor = new ByteStringUncompressor() {
-066@Override
-067public byte[] uncompress(ByteString 
data, Dictionary dict) throws IOException {
-068  return 
WALCellCodec.uncompressByteString(data, dict);
-069}
-070  };
-071
-072  /**
-073   * <b>All subclasses must 
implement a no argument constructor</b>
-074   */
-075  public WALCellCodec() {
-076this.compression = null;
-077  }
-078
-079  /**
-080   * Default constructor - <b>all 
subclasses must implement a constructor with this signature</b>
-081   * if they are to be dynamically loaded 
from the {@link Configuration}.
-082   * @param conf configuration to 
configure <tt>this</tt>
-083   * @param compression compression the 
codec should support, can be <tt>null</tt> to indicate no
-084   *  compression
-085   */
-086  public WALCellCodec(Configuration conf, 
CompressionContext compression) {
-087this.compression = compression;
-088  }
-089
-090  public static String 
getWALCellCodecClass(Configuration conf) {
-091return 
conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
-092  }
-093
-094  /**
-095   * Create and setup a {@link 
WALCellCodec} from the {@code cellCodecClsName} and
-096   * CompressionContext, if {@code 
cellCodecClsName} is specified.
-097   * Otherwise Cell Codec classname is 
read from {@link Configuration}.
-098   * Fully prepares the codec for use.
-099   * @param conf {@link Configuration} to 
read for the user-specified codec. If none is specified,
-100   *  uses a {@link 
WALCellCodec}.
-101   * @param cellCodecClsName name of 
codec
-102   * @param compression compression the 
codec should use
-103   * @return a {@link WALCellCodec} ready 
for use.
-104   * @throws 
UnsupportedOperationException if the codec cannot be instantiated
-105   */
-106
-107  public static WALCellCodec 
create(Configuration conf, String cellCodecClsName,
-108  CompressionContext compression) 
throws UnsupportedOperationException {
-109if (cellCodecClsName == null) {
-110  cellCodecClsName = 
getWALCellCodecClass(conf);
-111}
-112return 
ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-113{ Configuration.class, 
CompressionContext.class }, new Object[] { conf, compression });
-114  }
-115
-116  /**
-117   * Create and setup a {@link 
WALCellCodec} from the
-118   * CompressionContext.
-119   * Cell Codec classname is read from 
{@link Configuration}.
-120   * Fully prepares the codec for use.
-121   * @param conf {@link Configuration} to 
read for the user-specified codec. If none is specified,
-122   *  uses a {@link 
WALCellCodec}.
-123   * @param compression compression the 
codec should use
-124   * @return a {@link WALCellCodec} ready 
for use.
-125   * @throws 
UnsupportedOperationException if the codec cannot be instantiated
-126   */
-127  public static WALCellCodec 
create(Configuration conf,
-128  CompressionContext compression) 
throws UnsupportedOperationException {
-129String cellCodecClsName = 
getWALCellCodecClass(conf);
-130return 
ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-131{ Configuration.class, 
CompressionContext.class }, new Object[] { conf, compression 

[18/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/master/ServerManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/ServerManager.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/ServerManager.html
index 51fcd6e..6b9e2a8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/ServerManager.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/ServerManager.html
@@ -54,1029 +54,1026 @@
 046import 
org.apache.hadoop.hbase.ServerMetricsBuilder;
 047import 
org.apache.hadoop.hbase.ServerName;
 048import 
org.apache.hadoop.hbase.YouAreDeadException;
-049import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-050import 
org.apache.hadoop.hbase.client.ClusterConnection;
-051import 
org.apache.hadoop.hbase.client.RegionInfo;
-052import 
org.apache.hadoop.hbase.client.RetriesExhaustedException;
-053import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-054import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-055import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-056import 
org.apache.hadoop.hbase.regionserver.HRegionServer;
-057import 
org.apache.hadoop.hbase.util.Bytes;
-058import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-059import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-060import 
org.apache.yetus.audience.InterfaceAudience;
-061import 
org.apache.zookeeper.KeeperException;
-062import org.slf4j.Logger;
-063import org.slf4j.LoggerFactory;
-064
-065import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-066import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
-067
-068import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;
-073
-074/**
-075 * The ServerManager class manages info 
about region servers.
-076 * <p>
-077 * Maintains lists of online and dead 
servers.  Processes the startups,
-078 * shutdowns, and deaths of region 
servers.
-079 * <p>
-080 * Servers are distinguished in two 
different ways.  A given server has a
-081 * location, specified by hostname and 
port, and of which there can only be one
-082 * online at any given time.  A server 
instance is specified by the location
-083 * (hostname and port) as well as the 
startcode (timestamp from when the server
-084 * was started).  This is used to 
differentiate a restarted instance of a given
-085 * server from the original instance.
-086 * <p>
-087 * If a server is known not to be running 
any more, it is called dead. The dead
-088 * server needs to be handled by a 
ServerShutdownHandler.  If the handler is not
-089 * enabled yet, the server can't be 
handled right away so it is queued up.
-090 * After the handler is enabled, the 
server will be submitted to a handler to handle.
-091 * However, the handler may be just 
partially enabled.  If so,
-092 * the server cannot be fully processed, 
and is queued up for further processing.
-093 * A server is fully processed only after 
the handler is fully enabled
-094 * and has completed the handling.
-095 */
-096@InterfaceAudience.Private
-097public class ServerManager {
-098  public static final String 
WAIT_ON_REGIONSERVERS_MAXTOSTART =
-099  
"hbase.master.wait.on.regionservers.maxtostart";
-100
-101  public static final String 
WAIT_ON_REGIONSERVERS_MINTOSTART =
-102  
"hbase.master.wait.on.regionservers.mintostart";
-103
-104  public static final String 
WAIT_ON_REGIONSERVERS_TIMEOUT =
-105  
"hbase.master.wait.on.regionservers.timeout";
-106
-107  public static final String 
WAIT_ON_REGIONSERVERS_INTERVAL =
-108  
"hbase.master.wait.on.regionservers.interval";
-109
-110  private static final Logger LOG = 
LoggerFactory.getLogger(ServerManager.class);
-111
-112  // Set if we are to shutdown the 
cluster.
-113  private AtomicBoolean clusterShutdown = 
new AtomicBoolean(false);
-114
-115  /**
-116   * The last flushed sequence id for a 
region.
-117   */
-118  private final 
ConcurrentNavigableMap<byte[], Long> flushedSequenceIdByRegion =
-119new 
ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR);
-120
-121  /**
-122   * The last flushed sequence id for a 
store in a region.
-123   */
-124  private final 
ConcurrentNavigableMap<byte[], ConcurrentNavigableMap<byte[], 
Long>>
-125storeFlushedSequenceIdsByRegion = new 
ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR);
-126
-127  /** Map of registered servers to their 
current load */
-128  private final 
ConcurrentNavigableMap<ServerName, ServerMetrics> onlineServers =
-129new 
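
The Bytes.BYTES_COMPARATOR argument above matters because byte[] keys have
identity-based equals/hashCode; an ordered map keyed on encoded region names
needs an explicit lexicographic comparator. A minimal sketch (the key and
value are illustrative):

    import java.util.concurrent.ConcurrentNavigableMap;
    import java.util.concurrent.ConcurrentSkipListMap;
    import org.apache.hadoop.hbase.util.Bytes;

    ConcurrentNavigableMap<byte[], Long> flushedIds =
        new ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR);
    flushedIds.put(Bytes.toBytes("region-a"), 42L);
    Long seq = flushedIds.get(Bytes.toBytes("region-a")); // hits: comparator-based lookup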

[20/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
index 42d0637..eb16038 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
@@ -80,21 +80,21 @@
 072import 
org.apache.hadoop.hbase.PleaseHoldException;
 073import 
org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
 074import 
org.apache.hadoop.hbase.ScheduledChore;
-075import 
org.apache.hadoop.hbase.ServerMetricsBuilder;
-076import 
org.apache.hadoop.hbase.ServerName;
-077import 
org.apache.hadoop.hbase.TableDescriptors;
-078import 
org.apache.hadoop.hbase.TableName;
-079import 
org.apache.hadoop.hbase.TableNotDisabledException;
-080import 
org.apache.hadoop.hbase.TableNotFoundException;
-081import 
org.apache.hadoop.hbase.UnknownRegionException;
-082import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-083import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-084import 
org.apache.hadoop.hbase.client.MasterSwitchType;
-085import 
org.apache.hadoop.hbase.client.RegionInfo;
-086import 
org.apache.hadoop.hbase.client.Result;
-087import 
org.apache.hadoop.hbase.client.TableDescriptor;
-088import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-089import 
org.apache.hadoop.hbase.client.TableState;
+075import 
org.apache.hadoop.hbase.ServerName;
+076import 
org.apache.hadoop.hbase.TableDescriptors;
+077import 
org.apache.hadoop.hbase.TableName;
+078import 
org.apache.hadoop.hbase.TableNotDisabledException;
+079import 
org.apache.hadoop.hbase.TableNotFoundException;
+080import 
org.apache.hadoop.hbase.UnknownRegionException;
+081import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+082import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+083import 
org.apache.hadoop.hbase.client.MasterSwitchType;
+084import 
org.apache.hadoop.hbase.client.RegionInfo;
+085import 
org.apache.hadoop.hbase.client.Result;
+086import 
org.apache.hadoop.hbase.client.TableDescriptor;
+087import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+088import 
org.apache.hadoop.hbase.client.TableState;
+089import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
 090import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 091import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
 092import 
org.apache.hadoop.hbase.exceptions.MergeRegionException;
@@ -220,3477 +220,3481 @@
 212
 213import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 214import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
-215import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo;
-216import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas;
-217import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy;
-218import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-219import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
-220
-221/**
-222 * HMaster is the "master server" for 
HBase. An HBase cluster has one active
-223 * master.  If many masters are started, 
all compete.  Whichever wins goes on to
-224 * run the cluster.  All others park 
themselves in their constructor until
-225 * master or cluster shutdown or until 
the active master loses its lease in
-226 * zookeeper.  Thereafter, all running 
masters jostle to take over the master role.
-227 *
-228 * <p>The Master can be asked to 
shut down the cluster. See {@link #shutdown()}.  In
-229 * this case it will tell all 
regionservers to go down and then wait on them
-230 * all reporting in that they are down.  
This master will then shut itself down.
-231 *
-232 * <p>You can also shut down just 
this master.  Call {@link #stopMaster()}.
-233 *
-234 * @see org.apache.zookeeper.Watcher
-235 */
-236@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-237@SuppressWarnings("deprecation")
-238public class HMaster extends 
HRegionServer implements MasterServices {
-239  private static Logger LOG = 
LoggerFactory.getLogger(HMaster.class.getName());
-240
-241  /**
-242   * Protection against zombie master. 
Started once Master accepts active responsibility and
-243   * starts taking over responsibilities. 
Allows a finite time window before giving up ownership.
-244   */
-245  private static class 
InitializationMonitor extends HasThread {
-246/** The amount of time in 
milliseconds to sleep before checking initialization status. */
-247public static final String 
TIMEOUT_KEY = "hbase.master.initializationmonitor.timeout";
-248public static final long 
TIMEOUT_DEFAULT = 
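
The zombie-master window is governed by the key shown above. A hedged sketch
of overriding it programmatically (the 15-minute value is illustrative; the
default constant is truncated in this excerpt):

    Configuration conf = HBaseConfiguration.create();
    conf.setLong("hbase.master.initializationmonitor.timeout", 15L * 60L * 1000L);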

[11/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvDecoder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvDecoder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvDecoder.html
index 83c17c0..9df0225 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvDecoder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.CompressedKvDecoder.html
@@ -54,323 +54,362 @@
 046import org.apache.hadoop.io.IOUtils;
 047
 048import 
org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
-049
+049import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 050
-051/**
-052 * Compression in this class is lifted 
off Compressor/KeyValueCompression.
-053 * This is a pure coincidence... they are 
independent and don't have to be compatible.
-054 *
-055 * This codec is used at server side for 
writing cells to WAL as well as for sending edits
-056 * as part of the distributed splitting 
process.
-057 */
-058@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
-059  HBaseInterfaceAudience.PHOENIX, 
HBaseInterfaceAudience.CONFIG})
-060public class WALCellCodec implements 
Codec {
-061  /** Configuration key for the class to 
use when encoding cells in the WAL */
-062  public static final String 
WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
-063
-064  protected final CompressionContext 
compression;
-065  protected final ByteStringUncompressor 
statelessUncompressor = new ByteStringUncompressor() {
-066@Override
-067public byte[] uncompress(ByteString 
data, Dictionary dict) throws IOException {
-068  return 
WALCellCodec.uncompressByteString(data, dict);
-069}
-070  };
-071
-072  /**
-073   * <b>All subclasses must 
implement a no argument constructor</b>
-074   */
-075  public WALCellCodec() {
-076this.compression = null;
-077  }
-078
-079  /**
-080   * Default constructor - <b>all 
subclasses must implement a constructor with this signature</b>
-081   * if they are to be dynamically loaded 
from the {@link Configuration}.
-082   * @param conf configuration to 
configure <tt>this</tt>
-083   * @param compression compression the 
codec should support, can be <tt>null</tt> to indicate no
-084   *  compression
-085   */
-086  public WALCellCodec(Configuration conf, 
CompressionContext compression) {
-087this.compression = compression;
-088  }
-089
-090  public static String 
getWALCellCodecClass(Configuration conf) {
-091return 
conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
-092  }
-093
-094  /**
-095   * Create and setup a {@link 
WALCellCodec} from the {@code cellCodecClsName} and
-096   * CompressionContext, if {@code 
cellCodecClsName} is specified.
-097   * Otherwise Cell Codec classname is 
read from {@link Configuration}.
-098   * Fully prepares the codec for use.
-099   * @param conf {@link Configuration} to 
read for the user-specified codec. If none is specified,
-100   *  uses a {@link 
WALCellCodec}.
-101   * @param cellCodecClsName name of 
codec
-102   * @param compression compression the 
codec should use
-103   * @return a {@link WALCellCodec} ready 
for use.
-104   * @throws 
UnsupportedOperationException if the codec cannot be instantiated
-105   */
-106
-107  public static WALCellCodec 
create(Configuration conf, String cellCodecClsName,
-108  CompressionContext compression) 
throws UnsupportedOperationException {
-109if (cellCodecClsName == null) {
-110  cellCodecClsName = 
getWALCellCodecClass(conf);
-111}
-112return 
ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-113{ Configuration.class, 
CompressionContext.class }, new Object[] { conf, compression });
-114  }
-115
-116  /**
-117   * Create and setup a {@link 
WALCellCodec} from the
-118   * CompressionContext.
-119   * Cell Codec classname is read from 
{@link Configuration}.
-120   * Fully prepares the codec for use.
-121   * @param conf {@link Configuration} to 
read for the user-specified codec. If none is specified,
-122   *  uses a {@link 
WALCellCodec}.
-123   * @param compression compression the 
codec should use
-124   * @return a {@link WALCellCodec} ready 
for use.
-125   * @throws 
UnsupportedOperationException if the codec cannot be instantiated
-126   */
-127  public static WALCellCodec 
create(Configuration conf,
-128  CompressionContext compression) 
throws UnsupportedOperationException {
-129String cellCodecClsName = 
getWALCellCodecClass(conf);
-130return 
ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-131{ Configuration.class, 
CompressionContext.class }, new Object[] { conf, compression });

[19/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/master/RegionServerTracker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/RegionServerTracker.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/RegionServerTracker.html
index d60bbd0..cc0dba8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/RegionServerTracker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/RegionServerTracker.html
@@ -7,179 +7,190 @@
 
 
 001/**
-002 *
-003 * Licensed to the Apache Software 
Foundation (ASF) under one
-004 * or more contributor license 
agreements.  See the NOTICE file
-005 * distributed with this work for 
additional information
-006 * regarding copyright ownership.  The 
ASF licenses this file
-007 * to you under the Apache License, 
Version 2.0 (the
-008 * "License"); you may not use this file 
except in compliance
-009 * with the License.  You may obtain a 
copy of the License at
-010 *
-011 * 
http://www.apache.org/licenses/LICENSE-2.0
-012 *
-013 * Unless required by applicable law or 
agreed to in writing, software
-014 * distributed under the License is 
distributed on an "AS IS" BASIS,
-015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-016 * See the License for the specific 
language governing permissions and
-017 * limitations under the License.
-018 */
-019package org.apache.hadoop.hbase.master;
-020
-021import java.io.IOException;
-022import java.io.InterruptedIOException;
-023import java.util.ArrayList;
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package org.apache.hadoop.hbase.master;
+019
+020import java.io.IOException;
+021import java.io.InterruptedIOException;
+022import java.util.HashSet;
+023import java.util.Iterator;
 024import java.util.List;
-025import java.util.NavigableMap;
-026import java.util.TreeMap;
-027
-028import 
org.apache.hadoop.hbase.ServerName;
-029import 
org.apache.hadoop.hbase.zookeeper.ZKListener;
-030import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-031import 
org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-032import 
org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-033import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-034import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo;
-035import 
org.apache.yetus.audience.InterfaceAudience;
-036import 
org.apache.zookeeper.KeeperException;
-037import org.slf4j.Logger;
-038import org.slf4j.LoggerFactory;
-039
-040/**
-041 * Tracks the online region servers via 
ZK.
-042 *
-043 * <p>Handling of new RSs checking 
in is done via RPC.  This class
-044 * is only responsible for watching for 
expired nodes.  It handles
-045 * listening for changes in the RS node 
list and watching each node.
-046 *
-047 * <p>If an RS node gets deleted, 
this automatically handles calling of
-048 * {@link 
ServerManager#expireServer(ServerName)}
-049 */
-050@InterfaceAudience.Private
-051public class RegionServerTracker extends 
ZKListener {
-052  private static final Logger LOG = 
LoggerFactory.getLogger(RegionServerTracker.class);
-053  private final 
NavigableMap<ServerName, RegionServerInfo> regionServers = new 
TreeMap<>();
-054  private ServerManager serverManager;
-055  private MasterServices server;
-056
-057  public RegionServerTracker(ZKWatcher 
watcher,
-058  MasterServices server, 
ServerManager serverManager) {
-059super(watcher);
-060this.server = server;
-061this.serverManager = serverManager;
-062  }
-063
-064  /**
-065   * Starts the tracking of online 
RegionServers.
-066   *
-067   * <p>All RSs will be tracked 
after this method is called.
-068   *
-069   * @throws KeeperException
-070   * @throws IOException
-071   */
-072  public void start() throws 
KeeperException, IOException {
-073watcher.registerListener(this);
-074List<String> servers =
-075  
ZKUtil.listChildrenAndWatchThem(watcher, watcher.getZNodePaths().rsZNode);
-076refresh(servers);
-077  }
-078
-079  private void refresh(final 
List<String> servers) throws IOException {
-080
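
The tracker above rides on ZooKeeper's child-watch mechanism: each live
region server holds an ephemeral znode, and re-listing the children with a
watch set lets the master notice deletions. A sketch using the raw ZooKeeper
API (HBase wraps this in ZKUtil/ZKWatcher; "/hbase/rs" is the conventional
default path, assumed here):

    List<String> live = zk.getChildren("/hbase/rs", event -> {
      if (event.getType() == Watcher.Event.EventType.NodeChildrenChanged) {
        // re-list, diff against the previous set, and hand vanished servers
        // to ServerManager#expireServer(ServerName)
      }
    });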

[24/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/client/VersionInfoUtil.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/VersionInfoUtil.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/VersionInfoUtil.html
index 027f0d4..5c7f786 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/VersionInfoUtil.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/VersionInfoUtil.html
@@ -110,7 +110,7 @@
 102   * @param versionInfo the VersionInfo 
object to pack
 103   * @return the version number as int. 
(e.g. 0x0103004 is 1.3.4)
 104   */
-105  private static int 
getVersionNumber(final HBaseProtos.VersionInfo versionInfo) {
+105  public static int 
getVersionNumber(final HBaseProtos.VersionInfo versionInfo) {
 106if (versionInfo != null) {
 107  try {
 108final String[] components = 
getVersionComponents(versionInfo);
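
A worked example of the packing getVersionNumber's javadoc describes
("0x0103004 is 1.3.4"); the shift widths below are inferred from that
example, not taken from the truncated method body:

    int major = 1, minor = 3, patch = 4;
    int packed = (major << 20) | (minor << 12) | patch; // 0x103004, printed as 0x0103004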



[09/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.EnsureKvEncoder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.EnsureKvEncoder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.EnsureKvEncoder.html
index 83c17c0..9df0225 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.EnsureKvEncoder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.EnsureKvEncoder.html
@@ -54,323 +54,362 @@
 046import org.apache.hadoop.io.IOUtils;
 047
 048import 
org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
-049
+049import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 050
-051/**
-052 * Compression in this class is lifted 
off Compressor/KeyValueCompression.
-053 * This is a pure coincidence... they are 
independent and don't have to be compatible.
-054 *
-055 * This codec is used at server side for 
writing cells to WAL as well as for sending edits
-056 * as part of the distributed splitting 
process.
-057 */
-058@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
-059  HBaseInterfaceAudience.PHOENIX, 
HBaseInterfaceAudience.CONFIG})
-060public class WALCellCodec implements 
Codec {
-061  /** Configuration key for the class to 
use when encoding cells in the WAL */
-062  public static final String 
WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
-063
-064  protected final CompressionContext 
compression;
-065  protected final ByteStringUncompressor 
statelessUncompressor = new ByteStringUncompressor() {
-066@Override
-067public byte[] uncompress(ByteString 
data, Dictionary dict) throws IOException {
-068  return 
WALCellCodec.uncompressByteString(data, dict);
-069}
-070  };
-071
-072  /**
-073   * <b>All subclasses must 
implement a no argument constructor</b>
-074   */
-075  public WALCellCodec() {
-076this.compression = null;
-077  }
-078
-079  /**
-080   * Default constructor - <b>all 
subclasses must implement a constructor with this signature</b>
-081   * if they are to be dynamically loaded 
from the {@link Configuration}.
-082   * @param conf configuration to 
configure <tt>this</tt>
-083   * @param compression compression the 
codec should support, can be <tt>null</tt> to indicate no
-084   *  compression
-085   */
-086  public WALCellCodec(Configuration conf, 
CompressionContext compression) {
-087this.compression = compression;
-088  }
-089
-090  public static String 
getWALCellCodecClass(Configuration conf) {
-091return 
conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
-092  }
-093
-094  /**
-095   * Create and setup a {@link 
WALCellCodec} from the {@code cellCodecClsName} and
-096   * CompressionContext, if {@code 
cellCodecClsName} is specified.
-097   * Otherwise Cell Codec classname is 
read from {@link Configuration}.
-098   * Fully prepares the codec for use.
-099   * @param conf {@link Configuration} to 
read for the user-specified codec. If none is specified,
-100   *  uses a {@link 
WALCellCodec}.
-101   * @param cellCodecClsName name of 
codec
-102   * @param compression compression the 
codec should use
-103   * @return a {@link WALCellCodec} ready 
for use.
-104   * @throws 
UnsupportedOperationException if the codec cannot be instantiated
-105   */
-106
-107  public static WALCellCodec 
create(Configuration conf, String cellCodecClsName,
-108  CompressionContext compression) 
throws UnsupportedOperationException {
-109if (cellCodecClsName == null) {
-110  cellCodecClsName = 
getWALCellCodecClass(conf);
-111}
-112return 
ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-113{ Configuration.class, 
CompressionContext.class }, new Object[] { conf, compression });
-114  }
-115
-116  /**
-117   * Create and setup a {@link 
WALCellCodec} from the
-118   * CompressionContext.
-119   * Cell Codec classname is read from 
{@link Configuration}.
-120   * Fully prepares the codec for use.
-121   * @param conf {@link Configuration} to 
read for the user-specified codec. If none is specified,
-122   *  uses a {@link 
WALCellCodec}.
-123   * @param compression compression the 
codec should use
-124   * @return a {@link WALCellCodec} ready 
for use.
-125   * @throws 
UnsupportedOperationException if the codec cannot be instantiated
-126   */
-127  public static WALCellCodec 
create(Configuration conf,
-128  CompressionContext compression) 
throws UnsupportedOperationException {
-129String cellCodecClsName = 
getWALCellCodecClass(conf);
-130return 
ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-131{ Configuration.class, 
CompressionContext.class }, new Object[] { conf, compression });
-132  }
-133
-134  
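
A hedged sketch of a subclass satisfying both constructor requirements
spelled out in the javadoc above (TracingWALCellCodec is hypothetical, not in
the source):

    public class TracingWALCellCodec extends WALCellCodec {
      public TracingWALCellCodec() {
        super(); // required no-argument constructor
      }
      public TracingWALCellCodec(Configuration conf, CompressionContext compression) {
        super(conf, compression); // required for dynamic loading from Configuration
      }
    }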

[07/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.StatelessUncompressor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.StatelessUncompressor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.StatelessUncompressor.html
new file mode 100644
index 000..9df0225
--- /dev/null
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.StatelessUncompressor.html
@@ -0,0 +1,476 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html lang="en">
+<head>
+<title>Source code</title>
+</head>
+<body>
+<div class="sourceContainer">
+<pre>
+001/*
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package 
org.apache.hadoop.hbase.regionserver.wal;
+019
+020import java.io.ByteArrayOutputStream;
+021import java.io.IOException;
+022import java.io.InputStream;
+023import java.io.OutputStream;
+024
+025import 
org.apache.hadoop.conf.Configuration;
+026import org.apache.hadoop.hbase.Cell;
+027import 
org.apache.hadoop.hbase.CellUtil;
+028import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
+029import 
org.apache.hadoop.hbase.PrivateCellUtil;
+030import 
org.apache.hadoop.hbase.KeyValue;
+031import 
org.apache.hadoop.hbase.KeyValueUtil;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033import 
org.apache.hadoop.hbase.codec.BaseDecoder;
+034import 
org.apache.hadoop.hbase.codec.BaseEncoder;
+035import 
org.apache.hadoop.hbase.codec.Codec;
+036import 
org.apache.hadoop.hbase.codec.KeyValueCodecWithTags;
+037import 
org.apache.hadoop.hbase.io.ByteBuffInputStream;
+038import 
org.apache.hadoop.hbase.io.ByteBufferWriter;
+039import 
org.apache.hadoop.hbase.io.ByteBufferWriterOutputStream;
+040import 
org.apache.hadoop.hbase.io.util.Dictionary;
+041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+042import 
org.apache.hadoop.hbase.nio.ByteBuff;
+043import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+044import 
org.apache.hadoop.hbase.util.Bytes;
+045import 
org.apache.hadoop.hbase.util.ReflectionUtils;
+046import org.apache.hadoop.io.IOUtils;
+047
+048import 
org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
+049import 
org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+050
+051
+052/**
+053 * Compression in this class is lifted 
off Compressor/KeyValueCompression.
+054 * This is a pure coincidence... they are 
independent and don't have to be compatible.
+055 *
+056 * This codec is used at server side for 
writing cells to WAL as well as for sending edits
+057 * as part of the distributed splitting 
process.
+058 */
+059@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
+060  HBaseInterfaceAudience.PHOENIX, 
HBaseInterfaceAudience.CONFIG})
+061public class WALCellCodec implements 
Codec {
+062  /** Configuration key for the class to 
use when encoding cells in the WAL */
+063  public static final String 
WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
+064
+065  protected final CompressionContext 
compression;
+066
+067  /**
+068   * <b>All subclasses must 
implement a no argument constructor</b>
+069   */
+070  public WALCellCodec() {
+071this.compression = null;
+072  }
+073
+074  /**
+075   * Default constructor - <b>all 
subclasses must implement a constructor with this signature</b>
+076   * if they are to be dynamically loaded 
from the {@link Configuration}.
+077   * @param conf configuration to 
configure <tt>this</tt>
+078   * @param compression compression the 
codec should support, can be <tt>null</tt> to indicate no
+079   *  compression
+080   */
+081  public WALCellCodec(Configuration conf, 
CompressionContext compression) {
+082this.compression = compression;
+083  }
+084
+085  public static String 
getWALCellCodecClass(Configuration conf) {
+086return 
conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
+087  }
+088
+089  /**
+090   * Create and setup a {@link 
WALCellCodec} from the {@code cellCodecClsName} and
+091   * CompressionContext, if {@code 
cellCodecClsName} is specified.
+092   * Otherwise Cell Codec classname is 
read from 

[04/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.Reader.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.Reader.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.Reader.html
index e31f5c6..f4d1eb0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.Reader.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.Reader.html
@@ -31,277 +31,266 @@
 023import java.util.Set;
 024import 
org.apache.hadoop.hbase.HConstants;
 025import 
org.apache.hadoop.hbase.client.RegionInfo;
-026import 
org.apache.hadoop.hbase.regionserver.wal.CompressionContext;
-027import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
-028import 
org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
-029import 
org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
-030import 
org.apache.hadoop.hbase.replication.regionserver.WALFileLengthProvider;
-031import 
org.apache.yetus.audience.InterfaceAudience;
-032import 
org.apache.yetus.audience.InterfaceStability;
-033
-034import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-035
-036/**
-037 * A Write Ahead Log (WAL) provides 
service for reading, writing waledits. This interface provides
-038 * APIs for WAL users (such as 
RegionServer) to use the WAL (do append, sync, etc).
-039 *
-040 * Note that some internals, such as log 
rolling and performance evaluation tools, will use
-041 * WAL.equals to determine if they have 
already seen a given WAL.
-042 */
-043@InterfaceAudience.Private
-044@InterfaceStability.Evolving
-045public interface WAL extends Closeable, 
WALFileLengthProvider {
-046
-047  /**
-048   * Registers WALActionsListener
-049   */
-050  void registerWALActionsListener(final 
WALActionsListener listener);
-051
-052  /**
-053   * Unregisters WALActionsListener
-054   */
-055  boolean 
unregisterWALActionsListener(final WALActionsListener listener);
-056
-057  /**
-058   * Roll the log writer. That is, start 
writing log messages to a new file.
-059   *
-060   * <p>
-061   * The implementation is synchronized 
in order to make sure there's one rollWriter
-062   * running at any given time.
-063   *
-064   * @return If lots of logs, flush the 
returned regions so next time through we
-065   * can clean logs. Returns null 
if nothing to flush. Names are actual
-066   * region names as returned by 
{@link RegionInfo#getEncodedName()}
-067   */
-068  byte[][] rollWriter() throws 
FailedLogCloseException, IOException;
-069
-070  /**
-071   * Roll the log writer. That is, start 
writing log messages to a new file.
-072   *
-073   * <p>
-074   * The implementation is synchronized 
in order to make sure there's one rollWriter
-075   * running at any given time.
-076   *
-077   * @param force
-078   *  If true, force creation of 
a new writer even if no entries have
-079   *  been written to the current 
writer
-080   * @return If lots of logs, flush the 
returned regions so next time through we
-081   * can clean logs. Returns null 
if nothing to flush. Names are actual
-082   * region names as returned by 
{@link RegionInfo#getEncodedName()}
-083   */
-084  byte[][] rollWriter(boolean force) 
throws FailedLogCloseException, IOException;
-085
-086  /**
-087   * Stop accepting new writes. If we 
have unsynced writes still in buffer, sync them.
-088   * Extant edits are left in place in 
backing storage to be replayed later.
-089   */
-090  void shutdown() throws IOException;
-091
-092  /**
-093   * Caller no longer needs any edits 
from this WAL. Implementers are free to reclaim
-094   * underlying resources after this 
call; i.e. filesystem based WALs can archive or
-095   * delete files.
-096   */
-097  @Override
-098  void close() throws IOException;
-099
-100  /**
-101   * Append a set of edits to the WAL. 
The WAL is not flushed/sync'd after this transaction
-102   * completes BUT on return this edit 
must have its region edit/sequence id assigned
-103   * else it messes up our unification of 
mvcc and sequenceid.  On return <code>key</code> will
-104   * have the region edit/sequence id 
filled in.
-105   * @param info the regioninfo 
associated with append
-106   * @param key Modified by this call; we 
add to it this edits region edit/sequence id.
-107   * @param edits Edits to append. MAY 
CONTAIN NO EDITS for case where we want to get an edit
-108   * sequence id that is after all 
currently appended edits.
-109   * @param inMemstore Always true except 
for case where we are writing a compaction completion
-110   * record into the WAL; in this case 
the entry is just so we can finish an unfinished compaction
-111   * -- it is not an edit for memstore.
-112   * @return Returns a 'transaction id' 
and <code>key</code> will have the region edit/sequence id
-113   * in it.
-114   */
-115  long 
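
A sketch of honoring the rollWriter() contract documented above: flush the
regions it returns so older WAL files become cleanable on the next roll
(requestFlush is a hypothetical caller-side hook, not part of this
interface):

    byte[][] regionsToFlush = wal.rollWriter();
    if (regionsToFlush != null) {
      for (byte[] encodedRegionName : regionsToFlush) {
        // requestFlush(encodedRegionName);
      }
    }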

[03/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.html
index e31f5c6..f4d1eb0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WAL.html
@@ -31,277 +31,266 @@
 023import java.util.Set;
 024import 
org.apache.hadoop.hbase.HConstants;
 025import 
org.apache.hadoop.hbase.client.RegionInfo;
-026import 
org.apache.hadoop.hbase.regionserver.wal.CompressionContext;
-027import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
-028import 
org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
-029import 
org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
-030import 
org.apache.hadoop.hbase.replication.regionserver.WALFileLengthProvider;
-031import 
org.apache.yetus.audience.InterfaceAudience;
-032import 
org.apache.yetus.audience.InterfaceStability;
-033
-034import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-035
-036/**
-037 * A Write Ahead Log (WAL) provides 
service for reading, writing waledits. This interface provides
-038 * APIs for WAL users (such as 
RegionServer) to use the WAL (do append, sync, etc).
-039 *
-040 * Note that some internals, such as log 
rolling and performance evaluation tools, will use
-041 * WAL.equals to determine if they have 
already seen a given WAL.
-042 */
-043@InterfaceAudience.Private
-044@InterfaceStability.Evolving
-045public interface WAL extends Closeable, 
WALFileLengthProvider {
-046
-047  /**
-048   * Registers WALActionsListener
-049   */
-050  void registerWALActionsListener(final 
WALActionsListener listener);
-051
-052  /**
-053   * Unregisters WALActionsListener
-054   */
-055  boolean 
unregisterWALActionsListener(final WALActionsListener listener);
-056
-057  /**
-058   * Roll the log writer. That is, start 
writing log messages to a new file.
-059   *
-060   * <p>
-061   * The implementation is synchronized 
in order to make sure there's one rollWriter
-062   * running at any given time.
-063   *
-064   * @return If lots of logs, flush the 
returned regions so next time through we
-065   * can clean logs. Returns null 
if nothing to flush. Names are actual
-066   * region names as returned by 
{@link RegionInfo#getEncodedName()}
-067   */
-068  byte[][] rollWriter() throws 
FailedLogCloseException, IOException;
-069
-070  /**
-071   * Roll the log writer. That is, start 
writing log messages to a new file.
-072   *
-073   * <p>
-074   * The implementation is synchronized 
in order to make sure there's one rollWriter
-075   * running at any given time.
-076   *
-077   * @param force
-078   *  If true, force creation of 
a new writer even if no entries have
-079   *  been written to the current 
writer
-080   * @return If lots of logs, flush the 
returned regions so next time through we
-081   * can clean logs. Returns null 
if nothing to flush. Names are actual
-082   * region names as returned by 
{@link RegionInfo#getEncodedName()}
-083   */
-084  byte[][] rollWriter(boolean force) 
throws FailedLogCloseException, IOException;
-085
-086  /**
-087   * Stop accepting new writes. If we 
have unsynced writes still in buffer, sync them.
-088   * Extant edits are left in place in 
backing storage to be replayed later.
-089   */
-090  void shutdown() throws IOException;
-091
-092  /**
-093   * Caller no longer needs any edits 
from this WAL. Implementers are free to reclaim
-094   * underlying resources after this 
call; i.e. filesystem based WALs can archive or
-095   * delete files.
-096   */
-097  @Override
-098  void close() throws IOException;
-099
-100  /**
-101   * Append a set of edits to the WAL. 
The WAL is not flushed/sync'd after this transaction
-102   * completes BUT on return this edit 
must have its region edit/sequence id assigned
-103   * else it messes up our unification of 
mvcc and sequenceid.  On return <code>key</code> will
-104   * have the region edit/sequence id 
filled in.
-105   * @param info the regioninfo 
associated with append
-106   * @param key Modified by this call; we 
add to it this edits region edit/sequence id.
-107   * @param edits Edits to append. MAY 
CONTAIN NO EDITS for case where we want to get an edit
-108   * sequence id that is after all 
currently appended edits.
-109   * @param inMemstore Always true except 
for case where we are writing a compaction completion
-110   * record into the WAL; in this case 
the entry is just so we can finish an unfinished compaction
-111   * -- it is not an edit for memstore.
-112   * @return Returns a 'transaction id' 
and <code>key</code> will have the region edit/sequence id
-113   * in it.
-114   */
-115  long append(RegionInfo info, WALKeyImpl 
key, 
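
A sketch of the append contract documented above, built from the signature
fragment visible in the diff: on return the WALKeyImpl carries the region
edit/sequence id, and durability is a separate step (a sync(txid) method is
assumed here; it is not shown in this excerpt):

    long txid = wal.append(regionInfo, walKey, edits, true); // true: a real memstore edit
    // walKey now has its sequence id assigned; wal.sync(txid) would make it durable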

hbase git commit: HBASE-20625 refactor some WALCellCodec related code

2018-06-14 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/branch-2 161dc7c7f -> bde9f08a8


HBASE-20625 refactor some WALCellCodec related code

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bde9f08a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bde9f08a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bde9f08a

Branch: refs/heads/branch-2
Commit: bde9f08a83b7f0594cfccb22acfc9c66dc795049
Parents: 161dc7c
Author: jingyuntian 
Authored: Tue Jun 12 16:17:13 2018 +0800
Committer: Guanghao Zhang 
Committed: Thu Jun 14 19:46:33 2018 +0800

--
 .../hbase/protobuf/ReplicationProtbufUtil.java  | 61 --
 .../wal/AbstractProtobufLogWriter.java  |  3 +
 .../wal/AsyncProtobufLogWriter.java |  1 -
 .../regionserver/wal/CompressionContext.java| 54 ++--
 .../regionserver/wal/ProtobufLogReader.java |  2 +
 .../regionserver/wal/ProtobufLogWriter.java |  1 -
 .../hbase/regionserver/wal/ReaderBase.java  |  3 -
 .../wal/SecureProtobufLogReader.java|  1 +
 .../hbase/regionserver/wal/WALCellCodec.java| 87 ++--
 .../replication/ClusterMarkingEntryFilter.java  |  2 -
 .../java/org/apache/hadoop/hbase/wal/WAL.java   |  2 +
 .../org/apache/hadoop/hbase/wal/WALKeyImpl.java | 57 +
 .../wal/FaultyProtobufLogReader.java|  3 -
 13 files changed, 139 insertions(+), 138 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bde9f08a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
index 81dd59e..157ad1b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
@@ -24,29 +24,25 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.UUID;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.PrivateCellUtil;
-import org.apache.hadoop.hbase.wal.WALKeyImpl;
+import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.SizedCellScanner;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.HBaseRpcControllerImpl;
 import org.apache.hadoop.hbase.wal.WALEdit;
-import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
 
+import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+
 @InterfaceAudience.Private
 public class ReplicationProtbufUtil {
   /**
@@ -81,7 +77,7 @@ public class ReplicationProtbufUtil {
* found.
*/
  public static Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner>
-  buildReplicateWALEntryRequest(final Entry[] entries) {
+  buildReplicateWALEntryRequest(final Entry[] entries) throws IOException {
 return buildReplicateWALEntryRequest(entries, null, null, null, null);
   }
 
@@ -97,53 +93,30 @@ public class ReplicationProtbufUtil {
*/
  public static Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner>
   buildReplicateWALEntryRequest(final Entry[] entries, byte[] 
encodedRegionName,
-  String replicationClusterId, Path sourceBaseNamespaceDir, Path 
sourceHFileArchiveDir) {
+  String replicationClusterId, Path sourceBaseNamespaceDir, Path 
sourceHFileArchiveDir)
+  throws IOException {
 // Accumulate all the Cells seen in here.
List<List<? extends Cell>> allCells = new ArrayList<>(entries.length);
 int size = 0;
-WALProtos.FamilyScope.Builder scopeBuilder = 
WALProtos.FamilyScope.newBuilder();
 AdminProtos.WALEntry.Builder entryBuilder = 
AdminProtos.WALEntry.newBuilder();
 AdminProtos.ReplicateWALEntryRequest.Builder builder =
   AdminProtos.ReplicateWALEntryRequest.newBuilder();
-HBaseProtos.UUID.Builder uuidBuilder = HBaseProtos.UUID.newBuilder();
+
 for (Entry entry: entries) {
   entryBuilder.clear();
-  // TODO: this duplicates 
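
A hedged caller sketch for the changed signature above:
buildReplicateWALEntryRequest now throws IOException (encoding a WAL-entry
key can fail), so callers must handle it. The Pair generics follow the return
type restored above:

    try {
      Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner> p =
          ReplicationProtbufUtil.buildReplicateWALEntryRequest(entries);
      // p.getFirst() is the RPC request; p.getSecond() scans the accumulated cells
    } catch (IOException e) {
      // surface to the replication source so the batch can be retried
    }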

hbase git commit: HBASE-20625 refactor some WALCellCodec related code

2018-06-14 Thread zghao
Repository: hbase
Updated Branches:
  refs/heads/master 9e9db3245 -> 0b28155d2


HBASE-20625 refactor some WALCellCodec related code

Signed-off-by: Guanghao Zhang 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0b28155d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0b28155d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0b28155d

Branch: refs/heads/master
Commit: 0b28155d274910b4e667b949d51f78809a1eff0b
Parents: 9e9db32
Author: jingyuntian 
Authored: Thu Jun 14 10:25:24 2018 +0800
Committer: Guanghao Zhang 
Committed: Thu Jun 14 19:37:01 2018 +0800

--
 .../hbase/protobuf/ReplicationProtbufUtil.java  | 61 --
 .../wal/AbstractProtobufLogWriter.java  |  3 +
 .../wal/AsyncProtobufLogWriter.java |  1 -
 .../regionserver/wal/CompressionContext.java| 54 ++--
 .../regionserver/wal/ProtobufLogReader.java |  2 +
 .../regionserver/wal/ProtobufLogWriter.java |  1 -
 .../hbase/regionserver/wal/ReaderBase.java  |  3 -
 .../wal/SecureProtobufLogReader.java|  1 +
 .../hbase/regionserver/wal/WALCellCodec.java| 87 ++--
 .../replication/ClusterMarkingEntryFilter.java  |  2 -
 .../java/org/apache/hadoop/hbase/wal/WAL.java   | 11 ---
 .../org/apache/hadoop/hbase/wal/WALKeyImpl.java | 60 +-
 .../wal/FaultyProtobufLogReader.java|  3 -
 13 files changed, 134 insertions(+), 155 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0b28155d/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
index 81dd59e..157ad1b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
@@ -24,29 +24,25 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.UUID;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.PrivateCellUtil;
-import org.apache.hadoop.hbase.wal.WALKeyImpl;
+import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.SizedCellScanner;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.HBaseRpcControllerImpl;
 import org.apache.hadoop.hbase.wal.WALEdit;
-import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
 
+import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+
 @InterfaceAudience.Private
 public class ReplicationProtbufUtil {
   /**
@@ -81,7 +77,7 @@ public class ReplicationProtbufUtil {
* found.
*/
  public static Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner>
-  buildReplicateWALEntryRequest(final Entry[] entries) {
+  buildReplicateWALEntryRequest(final Entry[] entries) throws IOException {
 return buildReplicateWALEntryRequest(entries, null, null, null, null);
   }
 
@@ -97,53 +93,30 @@ public class ReplicationProtbufUtil {
*/
  public static Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner>
   buildReplicateWALEntryRequest(final Entry[] entries, byte[] 
encodedRegionName,
-  String replicationClusterId, Path sourceBaseNamespaceDir, Path 
sourceHFileArchiveDir) {
+  String replicationClusterId, Path sourceBaseNamespaceDir, Path 
sourceHFileArchiveDir)
+  throws IOException {
 // Accumulate all the Cells seen in here.
List<List<? extends Cell>> allCells = new ArrayList<>(entries.length);
 int size = 0;
-WALProtos.FamilyScope.Builder scopeBuilder = 
WALProtos.FamilyScope.newBuilder();
 AdminProtos.WALEntry.Builder entryBuilder = 
AdminProtos.WALEntry.newBuilder();
 AdminProtos.ReplicateWALEntryRequest.Builder builder =
   AdminProtos.ReplicateWALEntryRequest.newBuilder();
-HBaseProtos.UUID.Builder uuidBuilder = HBaseProtos.UUID.newBuilder();
+
 for (Entry entry: entries) {
   entryBuilder.clear();
-  // TODO: this duplicates a