hbase git commit: Need to check server when doing ServerCrashProcedure; we had it but I removed it a few patches back... makes for SCPs stamping on each other, failing ongoing assigns

2017-05-13 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14614 61ed02691 -> d43df0ed6


Need to check server when doing ServerCrashProcedure; we had it but I removed
it a few patches back... makes for SCPs stamping on each other, failing
ongoing assigns.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d43df0ed
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d43df0ed
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d43df0ed

Branch: refs/heads/HBASE-14614
Commit: d43df0ed653b627e3bf1e1f40aa4aac1ae89f300
Parents: 61ed026
Author: Michael Stack 
Authored: Sat May 13 21:37:52 2017 -0700
Committer: Michael Stack 
Committed: Sat May 13 21:37:52 2017 -0700

--
 .../hbase/master/assignment/AssignProcedure.java  |  8 
 .../master/assignment/RegionTransitionProcedure.java  |  6 ++
 .../hbase/master/assignment/UnassignProcedure.java|  5 +
 .../hbase/master/procedure/ServerCrashException.java  |  7 +--
 .../hbase/master/procedure/ServerCrashProcedure.java  | 14 --
 5 files changed, 36 insertions(+), 4 deletions(-)
--
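
For illustration, a minimal sketch (not part of the patch; the helper and its
name are assumptions) of how a ServerCrashProcedure could use the new
getServer(env) hook added below to confine itself to the Assign/Unassign
procedures that are actually going against the crashed server:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.assignment.RegionTransitionProcedure;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;

// Hypothetical helper, not part of this commit: filter the in-flight
// Assign/Unassign procedures down to those whose target server, as reported
// by the new getServer(env) hook, is the crashed server, so an SCP does not
// stamp on transitions going against live servers.
final class CrashedServerTransitions {
  static List<RegionTransitionProcedure> targeting(final MasterProcedureEnv env,
      final ServerName crashedServer, final List<RegionTransitionProcedure> inFlight) {
    final List<RegionTransitionProcedure> hits = new ArrayList<>();
    for (RegionTransitionProcedure rtp : inFlight) {
      ServerName target = rtp.getServer(env);
      // getServer may return null (e.g. an AssignProcedure whose region has no
      // RegionStateNode yet); such procedures are left alone.
      if (target != null && target.equals(crashedServer)) {
        hits.add(rtp);
      }
    }
    return hits;
  }
}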


http://git-wip-us.apache.org/repos/asf/hbase/blob/d43df0ed/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
index 36f6f08..42ece16 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
@@ -327,4 +327,12 @@ public class AssignProcedure extends RegionTransitionProcedure {
     super.toStringClassDetails(sb);
     if (this.targetServer != null) sb.append(", target=").append(this.targetServer);
   }
+
+  @Override
+  public ServerName getServer(final MasterProcedureEnv env) {
+    RegionStateNode node =
+        env.getAssignmentManager().getRegionStates().getRegionNode(this.getRegionInfo());
+    if (node == null) return null;
+    return node.getRegionLocation();
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/d43df0ed/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
index 6dc809b..49124ea 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -372,4 +372,10 @@ public abstract class RegionTransitionProcedure
     // the client does not know about this procedure.
     return false;
   }
+
+  /**
+   * Used by ServerCrashProcedure to see if this Assign/Unassign needs processing.
+   * @return ServerName the Assign or Unassign is going against.
+   */
+  public abstract ServerName getServer(final MasterProcedureEnv env);
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/d43df0ed/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
index a82a2f5..126718a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
@@ -239,4 +239,9 @@ public class UnassignProcedure extends RegionTransitionProcedure {
     super.toStringClassDetails(sb);
     sb.append(", server=").append(this.destinationServer);
   }
+
+  @Override
+  public ServerName getServer(final MasterProcedureEnv env) {
+    return this.destinationServer;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/d43df0ed/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashException.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashException.java

[07/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/testdevapidocs/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.html 
b/testdevapidocs/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.html
index 148ea94..a60a8b8 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class BalancerTestBase
+public class BalancerTestBase
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Class used to be the base of unit tests on load balancers. 
It gives helper
  methods to create maps of ServerName to lists of 
HRegionInfo
@@ -399,7 +399,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -408,7 +408,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 rand
-protected statichttp://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true;
 title="class or interface in java.util">Random rand
+protected statichttp://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true;
 title="class or interface in java.util">Random rand
 
 
 
@@ -417,7 +417,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 regionId
-staticint regionId
+staticint regionId
 
 
 
@@ -426,7 +426,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 conf
-protected staticorg.apache.hadoop.conf.Configuration conf
+protected staticorg.apache.hadoop.conf.Configuration conf
 
 
 
@@ -435,7 +435,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 loadBalancer
-protected 
staticorg.apache.hadoop.hbase.master.balancer.StochasticLoadBalancer loadBalancer
+protected 
staticorg.apache.hadoop.hbase.master.balancer.StochasticLoadBalancer loadBalancer
 
 
 
@@ -444,7 +444,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 largeCluster
-protectedint[] largeCluster
+protectedint[] largeCluster
 
 
 
@@ -453,7 +453,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 clusterStateMocks
-protectedint[][] clusterStateMocks
+protectedint[][] clusterStateMocks
 
 
 
@@ -462,7 +462,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 clusterRegionLocationMocks
-protectedint[][][] clusterRegionLocationMocks
+protectedint[][][] clusterRegionLocationMocks
 Data set for testLocalityCost:
 
  [test][regions][0] = [serverIndex] -> number of regions
@@ -486,7 +486,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 regionQueue
-privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Queue.html?is-external=true;
 title="class or interface in 
java.util">Queueorg.apache.hadoop.hbase.HRegionInfo regionQueue
+privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Queue.html?is-external=true;
 title="class or interface in 
java.util">Queueorg.apache.hadoop.hbase.HRegionInfo regionQueue
 
 
 
@@ -495,7 +495,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 serverQueue
-privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Queue.html?is-external=true;
 title="class or interface in 
java.util">Queueorg.apache.hadoop.hbase.ServerName serverQueue
+privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Queue.html?is-external=true;
 title="class or interface in 
java.util">Queueorg.apache.hadoop.hbase.ServerName serverQueue
 
 
 
@@ -512,7 +512,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 BalancerTestBase
-publicBalancerTestBase()
+publicBalancerTestBase()
 
 
 
@@ -529,7 +529,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 beforeAllTests
-public staticvoidbeforeAllTests()
+public staticvoidbeforeAllTests()
throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -543,7 +543,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 assertClusterAsBalanced
-publicvoidassertClusterAsBalanced(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.master.balancer.ServerAndLoadservers)
+publicvoidassertClusterAsBalanced(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 

[43/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
index 3d4c2dc..c96491b 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":6,"i8":10,"i9":10,"i10":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],8:["t4","Concrete Methods"]};
+var methods = 
{"i0":10,"i1":10,"i2":9,"i3":10,"i4":9,"i5":9,"i6":10,"i7":9,"i8":9,"i9":6,"i10":10,"i11":10,"i12":10,"i13":10,"i14":9,"i15":10,"i16":9};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],4:["t3","Abstract 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public abstract class TableBackupClient
+public abstract class TableBackupClient
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Base class for backup operation. Concrete implementation for
  full and incremental backup are delegated to corresponding sub-classes:
@@ -138,34 +138,38 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Field and Description
 
 
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+BACKUP_CLIENT_IMPL_CLASS
+
+
 protected http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 backupId
 
-
+
 protected BackupInfo
 backupInfo
 
-
+
 protected BackupManager
 backupManager
 
-
+
 protected 
org.apache.hadoop.conf.Configuration
 conf
 
-
+
 protected Connection
 conn
 
-
+
 private static 
org.apache.commons.logging.Log
 LOG
 
-
+
 protected http://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true;
 title="class or interface in java.util">HashMaphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 newTimestamps
 
-
+
 protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableName
 tableList
 
@@ -184,6 +188,9 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Constructor and Description
 
 
+TableBackupClient()
+
+
 TableBackupClient(Connectionconn,
  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringbackupId,
  BackupRequestrequest)
@@ -198,13 +205,13 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 Method Summary
 
-All MethodsInstance MethodsAbstract MethodsConcrete Methods
+All MethodsStatic MethodsInstance MethodsAbstract MethodsConcrete Methods
 
 Modifier and Type
 Method and Description
 
 
-private void
+protected void
 addManifest(BackupInfobackupInfo,
BackupManagerbackupManager,
BackupTypetype,
@@ -220,29 +227,35 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-private void
+static void
+cleanupAndRestoreBackupSystem(Connectionconn,
+ BackupInfobackupInfo,
+ 
org.apache.hadoop.conf.Configurationconf)
+
+
+protected void
 cleanupDistCpLog(BackupInfobackupInfo,
 org.apache.hadoop.conf.Configurationconf)
 Clean up directories with prefix "_distcp_logs-", which are 
generated when DistCp copying
  hlogs.
 
 
-
-private void
+
+protected static void
 cleanupExportSnapshotLog(org.apache.hadoop.conf.Configurationconf)
 Clean up directories with prefix "exportSnapshot-", which 
are generated when exporting
  snapshots.
 
 
-
-private void
+
+protected static void
 cleanupTargetDir(BackupInfobackupInfo,
 org.apache.hadoop.conf.Configurationconf)
 Clean up the uncompleted data at target directory if the 
ongoing backup has already entered
  the copy phase.
 
 
-
+
 protected void
 completeBackup(Connectionconn,
   BackupInfobackupInfo,
@@ -252,21 +265,26 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 Complete the overall backup.
 
 
-
-private void
-deleteSnapshot(Connectionconn,
- 

[38/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 5ea0902..ab05849 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -16,11 +16,11 @@
 008  @InterfaceAudience.Private
 009  public class Version {
 010    public static final String version = "2.0.0-SNAPSHOT";
-011    public static final String revision = "b34ab5980ea7a21fd750537476027f9a8665eacc";
+011    public static final String revision = "305ffcb04025ea6f7880e9961120d309f55bf8ba";
 012    public static final String user = "jenkins";
-013    public static final String date = "Fri May 12 14:39:46 UTC 2017";
+013    public static final String date = "Sat May 13 14:38:50 UTC 2017";
 014    public static final String url = "git://asf920.gq1.ygridcore.net/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";
-015    public static final String srcChecksum = "0475775f4a6a60d81d5ec4a190f550f8";
+015    public static final String srcChecksum = "35c4310e9777e11223615305594d2db6";
 016  }
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupClientFactory.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupClientFactory.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupClientFactory.html
new file mode 100644
index 000..82dbdea
--- /dev/null
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupClientFactory.html
@@ -0,0 +1,125 @@
+001  /**
+002   * Licensed to the Apache Software Foundation (ASF) under one
+003   * or more contributor license agreements.  See the NOTICE file
+004   * distributed with this work for additional information
+005   * regarding copyright ownership.  The ASF licenses this file
+006   * to you under the Apache License, Version 2.0 (the
+007   * "License"); you may not use this file except in compliance
+008   * with the License.  You may obtain a copy of the License at
+009   *
+010   *     http://www.apache.org/licenses/LICENSE-2.0
+011   *
+012   * Unless required by applicable law or agreed to in writing, software
+013   * distributed under the License is distributed on an "AS IS" BASIS,
+014   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+015   * See the License for the specific language governing permissions and
+016   * limitations under the License.
+017   */
+018  package org.apache.hadoop.hbase.backup;
+019
+020  import java.io.IOException;
+021
+022  import org.apache.hadoop.conf.Configuration;
+023  import org.apache.hadoop.hbase.backup.impl.FullTableBackupClient;
+024  import org.apache.hadoop.hbase.backup.impl.IncrementalTableBackupClient;
+025  import org.apache.hadoop.hbase.backup.impl.TableBackupClient;
+026  import org.apache.hadoop.hbase.client.Connection;
+027
+028  public class BackupClientFactory {
+029
+030    public static TableBackupClient create(Connection conn, String backupId, BackupRequest request)
+031      throws IOException
+032    {
+033      Configuration conf = conn.getConfiguration();
+034      try {
+035        String clsName = conf.get(TableBackupClient.BACKUP_CLIENT_IMPL_CLASS);
+036        if (clsName != null) {
+037          Class<?> clientImpl = Class.forName(clsName);
+038          TableBackupClient client = (TableBackupClient) clientImpl.newInstance();
+039          client.init(conn, backupId, request);
+040          return client;
+041        }
+042      } catch (Exception e) {
+043        throw new IOException(e);
+044      }
+045
+046      BackupType type = request.getBackupType();
+047      if (type == BackupType.FULL) {
+048        return new FullTableBackupClient(conn, backupId, request);
+049      } else {
+050        return new IncrementalTableBackupClient(conn, backupId, request);
+051      }
+052    }
+053  }
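
As a usage sketch (an illustration, not taken from this commit; the
implementation class named below is hypothetical), the factory above lets a
test or extension swap in its own TableBackupClient through the
BACKUP_CLIENT_IMPL_CLASS configuration key:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.backup.BackupClientFactory;
import org.apache.hadoop.hbase.backup.BackupRequest;
import org.apache.hadoop.hbase.backup.impl.TableBackupClient;
import org.apache.hadoop.hbase.client.Connection;

final class BackupClientFactoryUsage {
  // Returns whatever client the factory resolves: the class configured under
  // BACKUP_CLIENT_IMPL_CLASS if set (here a made-up fault-injecting test
  // client), otherwise the stock Full/IncrementalTableBackupClient.
  static TableBackupClient newClient(Connection conn, String backupId, BackupRequest request)
      throws IOException {
    Configuration conf = conn.getConfiguration();
    conf.set(TableBackupClient.BACKUP_CLIENT_IMPL_CLASS,
        "org.example.backup.FaultInjectingBackupClient"); // hypothetical class name
    return BackupClientFactory.create(conn, backupId, request);
  }
}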

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupDriver.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupDriver.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupDriver.html
index b1c210e..e779d2b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupDriver.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupDriver.html
@@ -142,74 +142,76 @@
 134return -1;
 135  }
 136  throw e;
-137}
-138return 0;
-139  }
-140
-141  @Override
-142  protected 

[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
--
diff --git 
a/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html 
b/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
index d7b7da4..26edc6a 100644
--- a/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
+++ b/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-12
+Last Published: 2017-05-13
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/dependency-info.html 
b/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
index 83da6c7..8fd0982 100644
--- a/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
+++ b/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-12
+Last Published: 2017-05-13
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
--
diff --git 
a/hbase-archetypes/hbase-shaded-client-project/dependency-management.html 
b/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
index 54c5684..48699a8 100644
--- a/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
+++ b/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-12
+Last Published: 2017-05-13
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/hbase-archetypes/hbase-shaded-client-project/index.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/index.html 
b/hbase-archetypes/hbase-shaded-client-project/index.html
index 2c3c66c..471fe42 100644
--- a/hbase-archetypes/hbase-shaded-client-project/index.html
+++ b/hbase-archetypes/hbase-shaded-client-project/index.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-12
+Last Published: 2017-05-13
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/hbase-archetypes/hbase-shaded-client-project/integration.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/integration.html 
b/hbase-archetypes/hbase-shaded-client-project/integration.html
index 2098702..d5158a0 100644
--- a/hbase-archetypes/hbase-shaded-client-project/integration.html
+++ b/hbase-archetypes/hbase-shaded-client-project/integration.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 

[21/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
index 7e37ca0..79c65e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
@@ -70,1527 +70,1525 @@
 062import com.google.common.collect.Sets;
 063
 064/**
-065 * The base class for load balancers. It 
provides the the functions used to by
-066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions
-067 * in the edge cases. It doesn't provide 
an implementation of the
-068 * actual balancing algorithm.
-069 *
-070 */
-071public abstract class BaseLoadBalancer 
implements LoadBalancer {
-072  protected static final int 
MIN_SERVER_BALANCE = 2;
-073  private volatile boolean stopped = 
false;
+065 * The base class for load balancers. It 
provides functions used by
+066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions in the edge 
cases.
+067 * It doesn't provide an implementation 
of the actual balancing algorithm.
+068 */
+069public abstract class BaseLoadBalancer 
implements LoadBalancer {
+070  protected static final int 
MIN_SERVER_BALANCE = 2;
+071  private volatile boolean stopped = 
false;
+072
+073  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
 074
-075  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
-076
-077  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
-078= load - 
load.getNumberOfRegions() == 0;
+075  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
+076= load - 
load.getNumberOfRegions() == 0;
+077
+078  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
 079
-080  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-081
-082  private static class DefaultRackManager 
extends RackManager {
-083@Override
-084public String getRack(ServerName 
server) {
-085  return UNKNOWN_RACK;
-086}
-087  }
-088
-089  /**
-090   * The constructor that uses the basic 
MetricsBalancer
-091   */
-092  protected BaseLoadBalancer() {
-093metricsBalancer = new 
MetricsBalancer();
-094  }
-095
-096  /**
-097   * This Constructor accepts an instance 
of MetricsBalancer,
-098   * which will be used instead of 
creating a new one
-099   */
-100  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-101this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-102  }
-103
-104  /**
-105   * An efficient array based 
implementation similar to ClusterState for keeping
-106   * the status of the cluster in terms 
of region assignment and distribution.
-107   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-108   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-109   * class uses mostly indexes and 
arrays.
-110   *
-111   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-112   * topology in terms of server names, 
hostnames and racks.
-113   */
-114  protected static class Cluster {
-115ServerName[] servers;
-116String[] hosts; // ServerName 
uniquely identifies a region server. multiple RS can run on the same host
-117String[] racks;
-118boolean multiServersPerHost = false; 
// whether or not any host has more than one server
-119
-120ArrayListString tables;
-121HRegionInfo[] regions;
-122DequeBalancerRegionLoad[] 
regionLoads;
-123private RegionLocationFinder 
regionFinder;
+080  private static class DefaultRackManager 
extends RackManager {
+081@Override
+082public String getRack(ServerName 
server) {
+083  return UNKNOWN_RACK;
+084}
+085  }
+086
+087  /**
+088   * The constructor that uses the basic 
MetricsBalancer
+089   */
+090  protected BaseLoadBalancer() {
+091metricsBalancer = new 
MetricsBalancer();
+092  }
+093
+094  /**
+095   * This Constructor accepts an instance 
of MetricsBalancer,
+096   * which will be used instead of 
creating a new one
+097   */
+098  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
+099this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
+100  }
+101
+102  /**
+103   * An efficient array based 
implementation similar to ClusterState for keeping
+104   * the status of the cluster in terms 
of region assignment and distribution.
+105   * LoadBalancers, such as 
StochasticLoadBalancer uses 

[17/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.SwapRegionsAction.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.SwapRegionsAction.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.SwapRegionsAction.html
index 7e37ca0..79c65e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.SwapRegionsAction.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.SwapRegionsAction.html
@@ -70,1527 +70,1525 @@
 062import com.google.common.collect.Sets;
 063
 064/**
-065 * The base class for load balancers. It 
provides the the functions used to by
-066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions
-067 * in the edge cases. It doesn't provide 
an implementation of the
-068 * actual balancing algorithm.
-069 *
-070 */
-071public abstract class BaseLoadBalancer 
implements LoadBalancer {
-072  protected static final int 
MIN_SERVER_BALANCE = 2;
-073  private volatile boolean stopped = 
false;
+065 * The base class for load balancers. It 
provides functions used by
+066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions in the edge 
cases.
+067 * It doesn't provide an implementation 
of the actual balancing algorithm.
+068 */
+069public abstract class BaseLoadBalancer 
implements LoadBalancer {
+070  protected static final int 
MIN_SERVER_BALANCE = 2;
+071  private volatile boolean stopped = 
false;
+072
+073  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
 074
-075  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
-076
-077  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
-078= load - 
load.getNumberOfRegions() == 0;
+075  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
+076= load - 
load.getNumberOfRegions() == 0;
+077
+078  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
 079
-080  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-081
-082  private static class DefaultRackManager 
extends RackManager {
-083@Override
-084public String getRack(ServerName 
server) {
-085  return UNKNOWN_RACK;
-086}
-087  }
-088
-089  /**
-090   * The constructor that uses the basic 
MetricsBalancer
-091   */
-092  protected BaseLoadBalancer() {
-093metricsBalancer = new 
MetricsBalancer();
-094  }
-095
-096  /**
-097   * This Constructor accepts an instance 
of MetricsBalancer,
-098   * which will be used instead of 
creating a new one
-099   */
-100  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-101this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-102  }
-103
-104  /**
-105   * An efficient array based 
implementation similar to ClusterState for keeping
-106   * the status of the cluster in terms 
of region assignment and distribution.
-107   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-108   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-109   * class uses mostly indexes and 
arrays.
-110   *
-111   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-112   * topology in terms of server names, 
hostnames and racks.
-113   */
-114  protected static class Cluster {
-115ServerName[] servers;
-116String[] hosts; // ServerName 
uniquely identifies a region server. multiple RS can run on the same host
-117String[] racks;
-118boolean multiServersPerHost = false; 
// whether or not any host has more than one server
-119
-120ArrayListString tables;
-121HRegionInfo[] regions;
-122DequeBalancerRegionLoad[] 
regionLoads;
-123private RegionLocationFinder 
regionFinder;
+080  private static class DefaultRackManager 
extends RackManager {
+081@Override
+082public String getRack(ServerName 
server) {
+083  return UNKNOWN_RACK;
+084}
+085  }
+086
+087  /**
+088   * The constructor that uses the basic 
MetricsBalancer
+089   */
+090  protected BaseLoadBalancer() {
+091metricsBalancer = new 
MetricsBalancer();
+092  }
+093
+094  /**
+095   * This Constructor accepts an instance 
of MetricsBalancer,
+096   * which will be used instead of 
creating a new one
+097   */
+098  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
+099this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
+100  }
+101
+102  /**
+103   * An efficient array based 
implementation similar to ClusterState for keeping
+104   * the status of the cluster in terms 
of region assignment and distribution.
+105   * LoadBalancers, such as 

[31/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
index cc40d21..2df0b04 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
@@ -61,8 +61,8 @@
 053import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 054import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
 055import 
org.apache.hadoop.hbase.backup.BackupType;
-056import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-057import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+056import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+057import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 058import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 059import 
org.apache.hadoop.hbase.client.Connection;
 060import 
org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -122,670 +122,703 @@
 114
 115  public static abstract class Command 
extends Configured {
 116CommandLine cmdline;
-117
+117Connection conn;
 118Command(Configuration conf) {
-119  super(conf);
-120}
-121
-122public void execute() throws 
IOException {
-123  if (cmdline.hasOption("h") || 
cmdline.hasOption("help")) {
-124printUsage();
-125throw new 
IOException(INCORRECT_USAGE);
-126  }
-127}
-128
-129protected abstract void 
printUsage();
-130  }
-131
-132  private BackupCommands() {
-133throw new 
AssertionError("Instantiating utility class...");
-134  }
-135
-136  public static Command 
createCommand(Configuration conf, BackupCommand type, CommandLine cmdline) {
-137Command cmd = null;
-138switch (type) {
-139case CREATE:
-140  cmd = new CreateCommand(conf, 
cmdline);
-141  break;
-142case DESCRIBE:
-143  cmd = new DescribeCommand(conf, 
cmdline);
-144  break;
-145case PROGRESS:
-146  cmd = new ProgressCommand(conf, 
cmdline);
-147  break;
-148case DELETE:
-149  cmd = new DeleteCommand(conf, 
cmdline);
-150  break;
-151case CANCEL:
-152  cmd = new CancelCommand(conf, 
cmdline);
-153  break;
-154case HISTORY:
-155  cmd = new HistoryCommand(conf, 
cmdline);
-156  break;
-157case SET:
-158  cmd = new BackupSetCommand(conf, 
cmdline);
-159  break;
-160case HELP:
-161default:
-162  cmd = new HelpCommand(conf, 
cmdline);
-163  break;
-164}
-165return cmd;
-166  }
-167
-168  static int numOfArgs(String[] args) {
-169if (args == null) return 0;
-170return args.length;
-171  }
-172
-173  public static class CreateCommand 
extends Command {
-174
-175CreateCommand(Configuration conf, 
CommandLine cmdline) {
-176  super(conf);
-177  this.cmdline = cmdline;
-178}
-179
-180@Override
-181public void execute() throws 
IOException {
-182  super.execute();
-183  if (cmdline == null || 
cmdline.getArgs() == null) {
-184printUsage();
-185throw new 
IOException(INCORRECT_USAGE);
-186  }
-187  String[] args = 
cmdline.getArgs();
-188  if (args.length !=3) {
-189printUsage();
-190throw new 
IOException(INCORRECT_USAGE);
-191  }
-192
-193  if 
(!BackupType.FULL.toString().equalsIgnoreCase(args[1])
-194   
!BackupType.INCREMENTAL.toString().equalsIgnoreCase(args[1])) {
-195System.out.println("ERROR: 
invalid backup type: " + args[1]);
-196printUsage();
-197throw new 
IOException(INCORRECT_USAGE);
-198  }
-199  if (!verifyPath(args[2])) {
-200System.out.println("ERROR: 
invalid backup destination: " + args[2]);
-201printUsage();
-202throw new 
IOException(INCORRECT_USAGE);
-203  }
-204
-205  String tables = null;
-206  Configuration conf = getConf() != 
null ? getConf() : HBaseConfiguration.create();
-207
-208  // Check if we have both: backup 
set and list of tables
-209  if (cmdline.hasOption(OPTION_TABLE) 
 cmdline.hasOption(OPTION_SET)) {
-210System.out.println("ERROR: You 
can specify either backup set or list"
-211+ " of tables, but not 
both");
-212printUsage();
-213throw new 
IOException(INCORRECT_USAGE);
-214  }
-215
-216  // Check backup set
-217  String setName = null;
-218  if (cmdline.hasOption(OPTION_SET)) 
{
-219setName = 
cmdline.getOptionValue(OPTION_SET);
-220tables = getTablesForSet(setName, 
conf);
-221
-222if (tables == null) {
-223  System.out.println("ERROR: 
Backup set '" + setName
-224

[09/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/replication.html
--
diff --git a/replication.html b/replication.html
index 5e8f5a8..d7941e3 100644
--- a/replication.html
+++ b/replication.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Apache HBase (TM) Replication
@@ -308,7 +308,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-12
+  Last Published: 
2017-05-13
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/resources.html
--
diff --git a/resources.html b/resources.html
index 0479e0c..54effd9 100644
--- a/resources.html
+++ b/resources.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Other Apache HBase (TM) Resources
 
@@ -336,7 +336,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-12
+  Last Published: 
2017-05-13
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/source-repository.html
--
diff --git a/source-repository.html b/source-repository.html
index 11a44ed..f65184d 100644
--- a/source-repository.html
+++ b/source-repository.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Source Repository
 
@@ -304,7 +304,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-12
+  Last Published: 
2017-05-13
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/sponsors.html
--
diff --git a/sponsors.html b/sponsors.html
index cf7ebc2..d63dd80 100644
--- a/sponsors.html
+++ b/sponsors.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Apache HBase™ Sponsors
 
@@ -338,7 +338,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-12
+  Last Published: 
2017-05-13
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/supportingprojects.html
--
diff --git a/supportingprojects.html b/supportingprojects.html
index bf48478..4bd8aec 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Supporting Projects
 
@@ -525,7 +525,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-12
+  Last Published: 
2017-05-13
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/team-list.html
--
diff --git a/team-list.html b/team-list.html
index 13f5ae4..b398198 100644
--- a/team-list.html
+++ b/team-list.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Team list
 
@@ -799,7 +799,7 @@ window.onLoad = init();
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-12
+  Last Published: 
2017-05-13
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/testdevapidocs/allclasses-frame.html
--
diff --git a/testdevapidocs/allclasses-frame.html 
b/testdevapidocs/allclasses-frame.html
index 3e678e5..845589d 100644
--- a/testdevapidocs/allclasses-frame.html
+++ b/testdevapidocs/allclasses-frame.html
@@ -271,6 +271,7 @@
 LabelFilteringScanLabelGenerator
 LargeTests
 LauncherSecurityManager
+LoadBalancerPerformanceEvaluation
 LoadOnlyFavoredStochasticBalancer
 LoadTestDataGenerator
 LoadTestDataGeneratorWithACL
@@ -950,6 +951,8 @@
 TestFullBackup
 TestFullBackupSet
 TestFullBackupSetRestoreSet
+TestFullBackupWithFailures
+TestFullBackupWithFailures.FullTableBackupClientForTest
 TestFullLogReconstruction
 TestFullRestore
 TestFuzzyRowAndColumnRangeFilter

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CancelCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CancelCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CancelCommand.html
index cc40d21..2df0b04 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CancelCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CancelCommand.html
@@ -61,8 +61,8 @@
 053import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 054import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
 055import 
org.apache.hadoop.hbase.backup.BackupType;
-056import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-057import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+056import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+057import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 058import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 059import 
org.apache.hadoop.hbase.client.Connection;
 060import 
org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -122,670 +122,703 @@
 114
 115  public static abstract class Command 
extends Configured {
 116CommandLine cmdline;
-117
+117Connection conn;
 118Command(Configuration conf) {
-119  super(conf);
-120}
-121
-122public void execute() throws 
IOException {
-123  if (cmdline.hasOption("h") || 
cmdline.hasOption("help")) {
-124printUsage();
-125throw new 
IOException(INCORRECT_USAGE);
-126  }
-127}
-128
-129protected abstract void 
printUsage();
-130  }
-131
-132  private BackupCommands() {
-133throw new 
AssertionError("Instantiating utility class...");
-134  }
-135
-136  public static Command 
createCommand(Configuration conf, BackupCommand type, CommandLine cmdline) {
-137Command cmd = null;
-138switch (type) {
-139case CREATE:
-140  cmd = new CreateCommand(conf, 
cmdline);
-141  break;
-142case DESCRIBE:
-143  cmd = new DescribeCommand(conf, 
cmdline);
-144  break;
-145case PROGRESS:
-146  cmd = new ProgressCommand(conf, 
cmdline);
-147  break;
-148case DELETE:
-149  cmd = new DeleteCommand(conf, 
cmdline);
-150  break;
-151case CANCEL:
-152  cmd = new CancelCommand(conf, 
cmdline);
-153  break;
-154case HISTORY:
-155  cmd = new HistoryCommand(conf, 
cmdline);
-156  break;
-157case SET:
-158  cmd = new BackupSetCommand(conf, 
cmdline);
-159  break;
-160case HELP:
-161default:
-162  cmd = new HelpCommand(conf, 
cmdline);
-163  break;
-164}
-165return cmd;
-166  }
-167
-168  static int numOfArgs(String[] args) {
-169if (args == null) return 0;
-170return args.length;
-171  }
-172
-173  public static class CreateCommand 
extends Command {
-174
-175CreateCommand(Configuration conf, 
CommandLine cmdline) {
-176  super(conf);
-177  this.cmdline = cmdline;
-178}
-179
-180@Override
-181public void execute() throws 
IOException {
-182  super.execute();
-183  if (cmdline == null || 
cmdline.getArgs() == null) {
-184printUsage();
-185throw new 
IOException(INCORRECT_USAGE);
-186  }
-187  String[] args = 
cmdline.getArgs();
-188  if (args.length !=3) {
-189printUsage();
-190throw new 
IOException(INCORRECT_USAGE);
-191  }
-192
-193  if 
(!BackupType.FULL.toString().equalsIgnoreCase(args[1])
-194   
!BackupType.INCREMENTAL.toString().equalsIgnoreCase(args[1])) {
-195System.out.println("ERROR: 
invalid backup type: " + args[1]);
-196printUsage();
-197throw new 
IOException(INCORRECT_USAGE);
-198  }
-199  if (!verifyPath(args[2])) {
-200System.out.println("ERROR: 
invalid backup destination: " + args[2]);
-201printUsage();
-202throw new 
IOException(INCORRECT_USAGE);
-203  }
-204
-205  String tables = null;
-206  Configuration conf = getConf() != 
null ? getConf() : HBaseConfiguration.create();
-207
-208  // Check if we have both: backup 
set and list of tables
-209  if (cmdline.hasOption(OPTION_TABLE) 
 cmdline.hasOption(OPTION_SET)) {
-210System.out.println("ERROR: You 
can specify either backup set or list"
-211+ " of tables, but not 
both");
-212printUsage();
-213throw new 
IOException(INCORRECT_USAGE);
-214  }
-215
-216  // Check backup set
-217  String setName = null;
-218  if (cmdline.hasOption(OPTION_SET)) 
{
-219setName = 
cmdline.getOptionValue(OPTION_SET);
-220tables = getTablesForSet(setName, 
conf);
-221
-222if (tables == null) {
-223  System.out.println("ERROR: 
Backup set '" + setName
-224  + 

[47/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
index a156b37..5f42ab0 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class BackupAdminImpl
+public class BackupAdminImpl
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements BackupAdmin
 
@@ -345,7 +345,7 @@ implements 
 
 CHECK_OK
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CHECK_OK
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CHECK_OK
 
 See Also:
 Constant
 Field Values
@@ -358,7 +358,7 @@ implements 
 
 CHECK_FAILED
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CHECK_FAILED
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CHECK_FAILED
 
 See Also:
 Constant
 Field Values
@@ -371,7 +371,7 @@ implements 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -380,7 +380,7 @@ implements 
 
 conn
-private finalConnection conn
+private finalConnection conn
 
 
 
@@ -397,7 +397,7 @@ implements 
 
 BackupAdminImpl
-publicBackupAdminImpl(Connectionconn)
+publicBackupAdminImpl(Connectionconn)
 
 
 
@@ -414,7 +414,7 @@ implements 
 
 close
-publicvoidclose()
+publicvoidclose()
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Specified by:
@@ -432,7 +432,7 @@ implements 
 
 getBackupInfo
-publicBackupInfogetBackupInfo(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringbackupId)
+publicBackupInfogetBackupInfo(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringbackupId)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:BackupAdmin
 Describe backup image command
@@ -454,7 +454,7 @@ implements 
 
 deleteBackups
-publicintdeleteBackups(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]backupIds)
+publicintdeleteBackups(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]backupIds)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:BackupAdmin
 Delete backup image command
@@ -476,7 +476,7 @@ implements 
 
 finalizeDelete
-privatevoidfinalizeDelete(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/util/HashSet.html?is-external=true;
 title="class or interface in java.util">HashSetTableNametablesMap,
+privatevoidfinalizeDelete(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/util/HashSet.html?is-external=true;
 title="class or interface in java.util">HashSetTableNametablesMap,
 BackupSystemTabletable)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Updates incremental backup set for every backupRoot
@@ -495,7 +495,7 @@ implements 
 
 deleteBackup
-privateintdeleteBackup(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringbackupId,

[49/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index dec8c84..34c9c1e 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2017 The Apache Software Foundation
 
-  File: 2159,
- Errors: 14339,
+  File: 2160,
+ Errors: 14355,
  Warnings: 0,
  Infos: 0
   
@@ -769,7 +769,7 @@ under the License.
   0
 
 
-  55
+  54
 
   
   
@@ -7377,7 +7377,7 @@ under the License.
   0
 
 
-  1
+  12
 
   
   
@@ -15749,7 +15749,7 @@ under the License.
   0
 
 
-  38
+  40
 
   
   
@@ -16006,6 +16006,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.backup.BackupClientFactory.java;>org/apache/hadoop/hbase/backup/BackupClientFactory.java
+
+
+  0
+
+
+  0
+
+
+  3
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyOrBuilder.java;>org/apache/hadoop/hbase/shaded/com/google/protobuf/AnyOrBuilder.java
 
 
@@ -22357,7 +22371,7 @@ under the License.
   0
 
 
-  4
+  5
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/coc.html
--
diff --git a/coc.html b/coc.html
index 3658a32..7cdaaf2 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Code of Conduct Policy
@@ -380,7 +380,7 @@ email to mailto:priv...@hbase.apache.org;>the priv
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-12
+  Last Published: 
2017-05-13
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 8b4c73f..dbc388f 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -679,7 +679,7 @@ Now your HBase server is running, start 
coding and build that next
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-12
+  Last Published: 
2017-05-13
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index d46a5c7..88f0820 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependencies
 
@@ -524,7 +524,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-12
+  Last Published: 
2017-05-13
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index a0bcac9..83657db 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Reactor Dependency Convergence
 
@@ -1849,7 +1849,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-12
+  Last Published: 
2017-05-13
 
 
 


[18/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
index 7e37ca0..79c65e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
@@ -70,1527 +70,1525 @@
 062import com.google.common.collect.Sets;
 063
 064/**
-065 * The base class for load balancers. It 
provides the the functions used to by
-066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions
-067 * in the edge cases. It doesn't provide 
an implementation of the
-068 * actual balancing algorithm.
-069 *
-070 */
-071public abstract class BaseLoadBalancer 
implements LoadBalancer {
-072  protected static final int 
MIN_SERVER_BALANCE = 2;
-073  private volatile boolean stopped = 
false;
+065 * The base class for load balancers. It 
provides functions used by
+066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions in the edge 
cases.
+067 * It doesn't provide an implementation 
of the actual balancing algorithm.
+068 */
+069public abstract class BaseLoadBalancer 
implements LoadBalancer {
+070  protected static final int 
MIN_SERVER_BALANCE = 2;
+071  private volatile boolean stopped = 
false;
+072
+073  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
 074
-075  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
-076
-077  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
-078= load - 
load.getNumberOfRegions() == 0;
+075  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
+076= load - 
load.getNumberOfRegions() == 0;
+077
+078  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
 079
-080  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-081
-082  private static class DefaultRackManager 
extends RackManager {
-083@Override
-084public String getRack(ServerName 
server) {
-085  return UNKNOWN_RACK;
-086}
-087  }
-088
-089  /**
-090   * The constructor that uses the basic 
MetricsBalancer
-091   */
-092  protected BaseLoadBalancer() {
-093metricsBalancer = new 
MetricsBalancer();
-094  }
-095
-096  /**
-097   * This Constructor accepts an instance 
of MetricsBalancer,
-098   * which will be used instead of 
creating a new one
-099   */
-100  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-101this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-102  }
-103
-104  /**
-105   * An efficient array based 
implementation similar to ClusterState for keeping
-106   * the status of the cluster in terms 
of region assignment and distribution.
-107   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-108   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-109   * class uses mostly indexes and 
arrays.
-110   *
-111   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-112   * topology in terms of server names, 
hostnames and racks.
-113   */
-114  protected static class Cluster {
-115ServerName[] servers;
-116String[] hosts; // ServerName 
uniquely identifies a region server. multiple RS can run on the same host
-117String[] racks;
-118boolean multiServersPerHost = false; 
// whether or not any host has more than one server
-119
-120ArrayListString tables;
-121HRegionInfo[] regions;
-122DequeBalancerRegionLoad[] 
regionLoads;
-123private RegionLocationFinder 
regionFinder;
+080  private static class DefaultRackManager 
extends RackManager {
+081@Override
+082public String getRack(ServerName 
server) {
+083  return UNKNOWN_RACK;
+084}
+085  }
+086
+087  /**
+088   * The constructor that uses the basic 
MetricsBalancer
+089   */
+090  protected BaseLoadBalancer() {
+091metricsBalancer = new 
MetricsBalancer();
+092  }
+093
+094  /**
+095   * This Constructor accepts an instance 
of MetricsBalancer,
+096   * which will be used instead of 
creating a new one
+097   */
+098  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
+099this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
+100  }
+101
+102  /**
+103   * An efficient array based 
implementation similar to ClusterState for keeping
+104   * the status of the cluster in terms 
of region assignment and distribution.
+105   * LoadBalancers, such as 
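
The rewritten javadoc in the hunk above describes why Cluster keeps its state in plain arrays indexed by server and region id rather than in nested hashmaps: candidate moves can then be scored with a handful of array reads. A rough, self-contained sketch of that idea, with illustrative names only (this is not the real BaseLoadBalancer.Cluster):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    /**
     * Minimal sketch of an array/index based cluster snapshot of the kind the
     * javadoc above describes. Illustration only, not HBase's actual class.
     */
    public class ArrayClusterSketch {
      final String[] servers;          // serverIndex -> server name
      final String[] regions;          // regionIndex -> region name
      final int[] regionToServer;      // regionIndex -> serverIndex (-1 = unassigned)
      final int[] regionsPerServer;    // serverIndex -> current region count

      ArrayClusterSketch(List<String> serverNames, Map<String, String> assignments) {
        servers = serverNames.toArray(new String[0]);
        regions = assignments.keySet().toArray(new String[0]);
        regionToServer = new int[regions.length];
        regionsPerServer = new int[servers.length];
        Map<String, Integer> serverIndex = new HashMap<>();
        for (int i = 0; i < servers.length; i++) serverIndex.put(servers[i], i);
        for (int r = 0; r < regions.length; r++) {
          Integer s = serverIndex.get(assignments.get(regions[r]));
          regionToServer[r] = (s == null) ? -1 : s;
          if (s != null) regionsPerServer[s]++;
        }
      }

      /** Apply a candidate move by touching two counters; no map churn. */
      void move(int region, int toServer) {
        int from = regionToServer[region];
        if (from >= 0) regionsPerServer[from]--;
        regionToServer[region] = toServer;
        regionsPerServer[toServer]++;
      }

      public static void main(String[] args) {
        Map<String, String> assignment = new HashMap<>();
        assignment.put("region-a", "rs1");
        assignment.put("region-b", "rs1");
        ArrayClusterSketch c = new ArrayClusterSketch(Arrays.asList("rs1", "rs2"), assignment);
        c.move(0, 1);
        System.out.println(Arrays.toString(c.regionsPerServer));
      }
    }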

[23/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
index fbffa2c..4f6f813 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
@@ -61,172 +61,175 @@
 053public class FullTableBackupClient 
extends TableBackupClient {
 054  private static final Log LOG = 
LogFactory.getLog(FullTableBackupClient.class);
 055
-056  public FullTableBackupClient(final 
Connection conn, final String backupId, BackupRequest request)
-057  throws IOException {
-058super(conn, backupId, request);
-059  }
-060
-061  /**
-062   * Do snapshot copy.
-063   * @param backupInfo backup info
-064   * @throws Exception exception
-065   */
-066  private void snapshotCopy(BackupInfo 
backupInfo) throws Exception {
-067LOG.info("Snapshot copy is 
starting.");
-068
-069// set overall backup phase: 
snapshot_copy
-070
backupInfo.setPhase(BackupPhase.SNAPSHOTCOPY);
+056  public FullTableBackupClient() {
+057  }
+058
+059  public FullTableBackupClient(final 
Connection conn, final String backupId, BackupRequest request)
+060  throws IOException {
+061super(conn, backupId, request);
+062  }
+063
+064  /**
+065   * Do snapshot copy.
+066   * @param backupInfo backup info
+067   * @throws Exception exception
+068   */
+069  protected void snapshotCopy(BackupInfo 
backupInfo) throws Exception {
+070LOG.info("Snapshot copy is 
starting.");
 071
-072// call ExportSnapshot to copy files 
based on hbase snapshot for backup
-073// ExportSnapshot only support single 
snapshot export, need loop for multiple tables case
-074BackupCopyJob copyService = 
BackupRestoreFactory.getBackupCopyJob(conf);
-075
-076// number of snapshots matches number 
of tables
-077float numOfSnapshots = 
backupInfo.getSnapshotNames().size();
+072// set overall backup phase: 
snapshot_copy
+073
backupInfo.setPhase(BackupPhase.SNAPSHOTCOPY);
+074
+075// call ExportSnapshot to copy files 
based on hbase snapshot for backup
+076// ExportSnapshot only support single 
snapshot export, need loop for multiple tables case
+077BackupCopyJob copyService = 
BackupRestoreFactory.getBackupCopyJob(conf);
 078
-079LOG.debug("There are " + (int) 
numOfSnapshots + " snapshots to be copied.");
-080
-081for (TableName table : 
backupInfo.getTables()) {
-082  // Currently we simply set the sub 
copy tasks by counting the table snapshot number, we can
-083  // calculate the real files' size 
for the percentage in the future.
-084  // 
backupCopier.setSubTaskPercntgInWholeTask(1f / numOfSnapshots);
-085  int res = 0;
-086  String[] args = new String[4];
-087  args[0] = "-snapshot";
-088  args[1] = 
backupInfo.getSnapshotName(table);
-089  args[2] = "-copy-to";
-090  args[3] = 
backupInfo.getTableBackupDir(table);
-091
-092  LOG.debug("Copy snapshot " + 
args[1] + " to " + args[3]);
-093  res = copyService.copy(backupInfo, 
backupManager, conf, BackupType.FULL, args);
-094  // if one snapshot export failed, 
do not continue for remained snapshots
-095  if (res != 0) {
-096LOG.error("Exporting Snapshot " + 
args[1] + " failed with return code: " + res + ".");
-097
-098throw new IOException("Failed of 
exporting snapshot " + args[1] + " to " + args[3]
-099+ " with reason code " + 
res);
-100  }
-101  LOG.info("Snapshot copy " + args[1] 
+ " finished.");
-102}
-103  }
-104
-105  /**
-106   * Backup request execution
-107   * @throws IOException
-108   */
-109  @Override
-110  public void execute() throws 
IOException {
-111
-112try (Admin admin = conn.getAdmin();) 
{
-113
-114  // Begin BACKUP
-115  beginBackup(backupManager, 
backupInfo);
-116  String savedStartCode = null;
-117  boolean firstBackup = false;
-118  // do snapshot for full table 
backup
-119
-120  savedStartCode = 
backupManager.readBackupStartCode();
-121  firstBackup = savedStartCode == 
null || Long.parseLong(savedStartCode) == 0L;
-122  if (firstBackup) {
-123// This is our first backup. 
Let's put some marker to system table so that we can hold the logs
-124// while we do the backup.
-125
backupManager.writeBackupStartCode(0L);
-126  }
-127  // We roll log here before we do 
the snapshot. It is possible there is duplicate data
-128  // in the log that is already in 
the snapshot. But if we do it after the snapshot, we
-129  // could have data loss.
-130  // A better approach is to do the 
roll log on each RS in the 
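
The hunk above shows the full-backup client looping over one snapshot per table, building ExportSnapshot-style arguments (-snapshot <name> -copy-to <dir>) and aborting on the first non-zero return code. A minimal sketch of that loop, assuming a hypothetical SnapshotExporter in place of the real BackupCopyJob obtained from BackupRestoreFactory:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    /** Sketch of the per-table snapshot export loop; only the argument layout
     *  and the fail-fast behaviour are taken from the diff above. */
    public class SnapshotCopySketch {
      interface SnapshotExporter {
        int export(String[] args) throws IOException;   // 0 means success
      }

      static void copySnapshots(Map<String, String> snapshotToTargetDir,
                                SnapshotExporter exporter) throws IOException {
        for (Map.Entry<String, String> e : snapshotToTargetDir.entrySet()) {
          // ExportSnapshot handles a single snapshot per run, hence one call per table.
          String[] args = { "-snapshot", e.getKey(), "-copy-to", e.getValue() };
          int res = exporter.export(args);
          if (res != 0) {
            // One failed export aborts the whole backup, as in the original code.
            throw new IOException("Exporting snapshot " + e.getKey() + " to "
                + e.getValue() + " failed with return code " + res);
          }
          System.out.println("Snapshot copy " + e.getKey() + " finished.");
        }
      }

      public static void main(String[] args) throws IOException {
        Map<String, String> jobs = new HashMap<>();
        jobs.put("snapshot_t1", "hdfs://backup/t1");
        copySnapshots(jobs, a -> {
          System.out.println("export " + Arrays.toString(a));   // stand-in for the real copy job
          return 0;
        });
      }
    }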

[39/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index 59a46a4..dadc344 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -192,8 +192,8 @@
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.security.AuthMethod
-org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection
 org.apache.hadoop.hbase.security.SaslStatus
+org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html 
b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
index d72927b..b2569da 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
@@ -393,13 +393,31 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 privateHMaster m_master
 
 
+
+
+
+
+
+m_catalogJanitorEnabled
+privateboolean m_catalogJanitorEnabled
+
+
+
+
+
+
+
+m_catalogJanitorEnabled__IsNotDefault
+privateboolean m_catalogJanitorEnabled__IsNotDefault
+
+
 
 
 
 
 
 m_metaLocation
-privateServerName m_metaLocation
+privateServerName m_metaLocation
 
 
 
@@ -408,7 +426,7 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 
 
 m_metaLocation__IsNotDefault
-privateboolean m_metaLocation__IsNotDefault
+privateboolean m_metaLocation__IsNotDefault
 
 
 
@@ -417,7 +435,7 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 
 
 m_frags
-privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer m_frags
+privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer m_frags
 
 
 
@@ -426,7 +444,7 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 
 
 m_frags__IsNotDefault
-privateboolean m_frags__IsNotDefault
+privateboolean m_frags__IsNotDefault
 
 
 
@@ -435,7 +453,7 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 
 
 m_filter
-privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_filter
+privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_filter
 
 
 
@@ -444,7 +462,7 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 
 
 m_filter__IsNotDefault
-privateboolean m_filter__IsNotDefault
+privateboolean m_filter__IsNotDefault
 
 
 
@@ -453,7 +471,7 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 
 
 m_serverManager
-privateServerManager m_serverManager
+privateServerManager m_serverManager
 
 
 
@@ -462,7 +480,7 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 
 
 m_serverManager__IsNotDefault
-privateboolean m_serverManager__IsNotDefault
+privateboolean m_serverManager__IsNotDefault
 
 
 
@@ -471,7 +489,7 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 
 
 m_servers
-privatehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName m_servers
+privatehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName m_servers
 
 
 
@@ -480,43 +498,25 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 
 
 m_servers__IsNotDefault
-privateboolean m_servers__IsNotDefault
-
-
-
-
-
-
-
-m_assignmentManager
-privateAssignmentManager m_assignmentManager
-
-
-
-
-
-
-
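
The ImplData diff above (and the matching MasterStatusTmpl source diff later in this thread) is Jamon-generated code: every optional template argument gets a backing field plus an __IsNotDefault flag so the template can tell an unset argument apart from one explicitly set. A hand-written sketch of that generated pattern, using the catalogJanitorEnabled argument as the example; names are illustrative, not the generated file itself:

    /** Sketch of the Jamon ImplData optional-argument pattern visible above. */
    public class ImplDataSketch {
      private boolean m_catalogJanitorEnabled;
      private boolean m_catalogJanitorEnabled__IsNotDefault;

      public void setCatalogJanitorEnabled(boolean catalogJanitorEnabled) {
        m_catalogJanitorEnabled = catalogJanitorEnabled;
        m_catalogJanitorEnabled__IsNotDefault = true;   // remember the caller provided it
      }

      public boolean getCatalogJanitorEnabled() {
        return m_catalogJanitorEnabled;
      }

      public boolean getCatalogJanitorEnabled__IsNotDefault() {
        return m_catalogJanitorEnabled__IsNotDefault;
      }

      public static void main(String[] args) {
        ImplDataSketch d = new ImplDataSketch();
        System.out.println(d.getCatalogJanitorEnabled__IsNotDefault()); // false
        d.setCatalogJanitorEnabled(true);
        System.out.println(d.getCatalogJanitorEnabled__IsNotDefault()); // true
      }
    }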

[19/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
index 7e37ca0..79c65e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
@@ -70,1527 +70,1525 @@

[29/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
index cc40d21..2df0b04 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
@@ -61,8 +61,8 @@
 053import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 054import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
 055import 
org.apache.hadoop.hbase.backup.BackupType;
-056import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-057import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+056import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+057import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 058import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 059import 
org.apache.hadoop.hbase.client.Connection;
 060import 
org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -122,670 +122,703 @@
 114
 115  public static abstract class Command 
extends Configured {
 116CommandLine cmdline;
-117
+117Connection conn;
 118Command(Configuration conf) {
-119  super(conf);
-120}
-121
-122public void execute() throws 
IOException {
-123  if (cmdline.hasOption("h") || 
cmdline.hasOption("help")) {
-124printUsage();
-125throw new 
IOException(INCORRECT_USAGE);
-126  }
-127}
-128
-129protected abstract void 
printUsage();
-130  }
-131
-132  private BackupCommands() {
-133throw new 
AssertionError("Instantiating utility class...");
-134  }
-135
-136  public static Command 
createCommand(Configuration conf, BackupCommand type, CommandLine cmdline) {
-137Command cmd = null;
-138switch (type) {
-139case CREATE:
-140  cmd = new CreateCommand(conf, 
cmdline);
-141  break;
-142case DESCRIBE:
-143  cmd = new DescribeCommand(conf, 
cmdline);
-144  break;
-145case PROGRESS:
-146  cmd = new ProgressCommand(conf, 
cmdline);
-147  break;
-148case DELETE:
-149  cmd = new DeleteCommand(conf, 
cmdline);
-150  break;
-151case CANCEL:
-152  cmd = new CancelCommand(conf, 
cmdline);
-153  break;
-154case HISTORY:
-155  cmd = new HistoryCommand(conf, 
cmdline);
-156  break;
-157case SET:
-158  cmd = new BackupSetCommand(conf, 
cmdline);
-159  break;
-160case HELP:
-161default:
-162  cmd = new HelpCommand(conf, 
cmdline);
-163  break;
-164}
-165return cmd;
-166  }
-167
-168  static int numOfArgs(String[] args) {
-169if (args == null) return 0;
-170return args.length;
-171  }
-172
-173  public static class CreateCommand 
extends Command {
-174
-175CreateCommand(Configuration conf, 
CommandLine cmdline) {
-176  super(conf);
-177  this.cmdline = cmdline;
-178}
-179
-180@Override
-181public void execute() throws 
IOException {
-182  super.execute();
-183  if (cmdline == null || 
cmdline.getArgs() == null) {
-184printUsage();
-185throw new 
IOException(INCORRECT_USAGE);
-186  }
-187  String[] args = 
cmdline.getArgs();
-188  if (args.length !=3) {
-189printUsage();
-190throw new 
IOException(INCORRECT_USAGE);
-191  }
-192
-193  if 
(!BackupType.FULL.toString().equalsIgnoreCase(args[1])
-194   
!BackupType.INCREMENTAL.toString().equalsIgnoreCase(args[1])) {
-195System.out.println("ERROR: 
invalid backup type: " + args[1]);
-196printUsage();
-197throw new 
IOException(INCORRECT_USAGE);
-198  }
-199  if (!verifyPath(args[2])) {
-200System.out.println("ERROR: 
invalid backup destination: " + args[2]);
-201printUsage();
-202throw new 
IOException(INCORRECT_USAGE);
-203  }
-204
-205  String tables = null;
-206  Configuration conf = getConf() != 
null ? getConf() : HBaseConfiguration.create();
-207
-208  // Check if we have both: backup 
set and list of tables
-209  if (cmdline.hasOption(OPTION_TABLE) 
 cmdline.hasOption(OPTION_SET)) {
-210System.out.println("ERROR: You 
can specify either backup set or list"
-211+ " of tables, but not 
both");
-212printUsage();
-213throw new 
IOException(INCORRECT_USAGE);
-214  }
-215
-216  // Check backup set
-217  String setName = null;
-218  if (cmdline.hasOption(OPTION_SET)) 
{
-219setName = 
cmdline.getOptionValue(OPTION_SET);
-220tables = getTablesForSet(setName, 
conf);
-221
-222if (tables == null) {
-223  System.out.println("ERROR: 
Backup set '" + setName
-224 
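
The BackupCommands hunk above adds a shared Connection field to the abstract Command and keeps the shape of a factory switch plus a common help/usage check at the top of execute(). A trimmed-down sketch of that dispatch-and-validate shape, with stand-in classes rather than the real hbase backup CLI:

    import java.io.IOException;

    /** Condensed sketch of the command factory and validation flow shown above. */
    public class BackupCommandsSketch {
      enum CommandType { CREATE, HELP }

      static abstract class Command {
        void execute(String[] args) throws IOException {
          // Shared check every command runs first, as in the original execute().
          if (args.length > 0 && ("-h".equals(args[0]) || "help".equals(args[0]))) {
            printUsage();
            throw new IOException("Incorrect usage");
          }
        }
        abstract void printUsage();
      }

      static Command createCommand(CommandType type) {
        switch (type) {
          case CREATE: return new Command() {
            void execute(String[] args) throws IOException {
              super.execute(args);
              if (args.length != 3) { printUsage(); throw new IOException("Incorrect usage"); }
              System.out.println("create " + args[1] + " backup at " + args[2]);
            }
            void printUsage() { System.out.println("usage: create <full|incremental> <dest>"); }
          };
          case HELP:
          default: return new Command() {
            void printUsage() { System.out.println("usage: backup <command>"); }
          };
        }
      }

      public static void main(String[] args) throws IOException {
        createCommand(CommandType.CREATE)
            .execute(new String[] { "create", "full", "hdfs://backup" });
      }
    }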

[34/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
index cc40d21..2df0b04 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html

[51/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/8e0a5167
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/8e0a5167
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/8e0a5167

Branch: refs/heads/asf-site
Commit: 8e0a516706e9ad7e5e103a62c5c080f7514ee7f4
Parents: 80faeee
Author: jenkins 
Authored: Sat May 13 14:58:27 2017 +
Committer: jenkins 
Committed: Sat May 13 14:58:27 2017 +

--
 acid-semantics.html |4 +-
 apache_hbase_reference_guide.pdf|4 +-
 apache_hbase_reference_guide.pdfmarks   |4 +-
 bulk-loads.html |4 +-
 checkstyle-aggregate.html   | 5002 +-
 checkstyle.rss  |   26 +-
 coc.html|4 +-
 cygwin.html |4 +-
 dependencies.html   |4 +-
 dependency-convergence.html |4 +-
 dependency-info.html|4 +-
 dependency-management.html  |4 +-
 devapidocs/allclasses-frame.html|1 +
 devapidocs/allclasses-noframe.html  |1 +
 devapidocs/constant-values.html |   25 +-
 devapidocs/index-all.html   |   74 +-
 .../apache/hadoop/hbase/backup/BackupAdmin.html |4 +-
 .../hbase/backup/BackupClientFactory.html   |  281 +
 .../hadoop/hbase/backup/BackupCopyJob.html  |4 +-
 .../hadoop/hbase/backup/BackupDriver.html   |   12 +-
 .../BackupRestoreConstants.BackupCommand.html   |   42 +-
 .../hbase/backup/BackupRestoreConstants.html|   60 +-
 .../backup/class-use/BackupClientFactory.html   |  125 +
 .../hbase/backup/class-use/BackupInfo.html  |   62 +-
 .../hbase/backup/class-use/BackupRequest.html   |   12 +
 .../hbase/backup/class-use/BackupType.html  |2 +-
 .../hbase/backup/impl/BackupAdminImpl.html  |   52 +-
 .../impl/BackupCommands.BackupSetCommand.html   |   41 +-
 .../impl/BackupCommands.CancelCommand.html  |   17 +-
 .../backup/impl/BackupCommands.Command.html |   61 +-
 .../impl/BackupCommands.CreateCommand.html  |   46 +-
 .../impl/BackupCommands.DeleteCommand.html  |   42 +-
 .../impl/BackupCommands.DescribeCommand.html|   17 +-
 .../backup/impl/BackupCommands.HelpCommand.html |   17 +-
 .../impl/BackupCommands.HistoryCommand.html |   27 +-
 .../impl/BackupCommands.ProgressCommand.html|   17 +-
 .../hbase/backup/impl/BackupCommands.html   |6 +-
 .../hadoop/hbase/backup/impl/BackupManager.html |  118 +-
 .../hbase/backup/impl/BackupSystemTable.html|  455 +-
 .../backup/impl/FullTableBackupClient.html  |   36 +-
 .../backup/impl/IncrementalBackupManager.html   |2 +-
 .../impl/IncrementalTableBackupClient.html  |   68 +-
 .../hbase/backup/impl/TableBackupClient.html|  277 +-
 .../backup/impl/class-use/BackupException.html  |2 +-
 .../backup/impl/class-use/BackupManager.html|2 +-
 .../impl/class-use/TableBackupClient.html   |   24 +
 .../hadoop/hbase/backup/impl/package-use.html   |5 +
 .../hadoop/hbase/backup/package-frame.html  |1 +
 .../hadoop/hbase/backup/package-summary.html|   42 +-
 .../hadoop/hbase/backup/package-tree.html   |5 +-
 .../hadoop/hbase/class-use/TableName.html   |   14 +-
 .../hbase/classification/package-tree.html  |6 +-
 .../hadoop/hbase/client/class-use/Admin.html|   11 +-
 .../hbase/client/class-use/Connection.html  |   46 +-
 .../hadoop/hbase/client/class-use/Put.html  |8 +
 .../hadoop/hbase/client/package-tree.html   |   26 +-
 .../hadoop/hbase/filter/package-tree.html   |   10 +-
 .../hadoop/hbase/io/hfile/package-tree.html |4 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   |2 +-
 .../hadoop/hbase/mapreduce/package-tree.html|4 +-
 .../hadoop/hbase/master/LoadBalancer.html   |2 +-
 .../BaseLoadBalancer.Cluster.Action.Type.html   |   14 +-
 .../BaseLoadBalancer.Cluster.Action.html|   10 +-
 ...LoadBalancer.Cluster.AssignRegionAction.html |   12 +-
 ...seLoadBalancer.Cluster.MoveRegionAction.html |   14 +-
 ...eLoadBalancer.Cluster.SwapRegionsAction.html |   16 +-
 .../balancer/BaseLoadBalancer.Cluster.html  |  140 +-
 .../BaseLoadBalancer.DefaultRackManager.html|6 +-
 .../hbase/master/balancer/BaseLoadBalancer.html |  107 +-
 .../hadoop/hbase/master/package-tree.html   |6 +-
 .../hbase/master/procedure/package-tree.html|2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |   12 +-
 .../hadoop/hbase/procedure2/package-tree.html   |2 +-
 

[11/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/hbase-annotations/index.html
--
diff --git a/hbase-annotations/index.html b/hbase-annotations/index.html
index 632f9d5..8b68d50 100644
--- a/hbase-annotations/index.html
+++ b/hbase-annotations/index.html
@@ -1,5 +1,5 @@
-
+
@@ -10,7 +10,7 @@
-
+
@@ -27,7 +27,7 @@
-Last Published: 2017-05-12
+Last Published: 2017-05-13
   | Version: 2.0.0-SNAPSHOT
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/hbase-annotations/integration.html
--
diff --git a/hbase-annotations/integration.html b/hbase-annotations/integration.html
index 1a82d3f..0bf5958 100644
--- a/hbase-annotations/integration.html
+++ b/hbase-annotations/integration.html
@@ -1,5 +1,5 @@
-
+
@@ -10,7 +10,7 @@
-
+
@@ -27,7 +27,7 @@
-Last Published: 2017-05-12
+Last Published: 2017-05-13
   | Version: 2.0.0-SNAPSHOT
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/hbase-annotations/issue-tracking.html
--
diff --git a/hbase-annotations/issue-tracking.html b/hbase-annotations/issue-tracking.html
index 9a80c7e..473c333 100644
--- a/hbase-annotations/issue-tracking.html
+++ b/hbase-annotations/issue-tracking.html
@@ -1,5 +1,5 @@
-
+
@@ -10,7 +10,7 @@
-
+
@@ -27,7 +27,7 @@
-Last Published: 2017-05-12
+Last Published: 2017-05-13
   | Version: 2.0.0-SNAPSHOT
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/hbase-annotations/license.html
--
diff --git a/hbase-annotations/license.html b/hbase-annotations/license.html
index b5bedbf..469ddfe 100644
--- a/hbase-annotations/license.html
+++ b/hbase-annotations/license.html
@@ -1,5 +1,5 @@
-
+
@@ -10,7 +10,7 @@
-
+
@@ -27,7 +27,7 @@
-Last Published: 2017-05-12
+Last Published: 2017-05-13
   | Version: 2.0.0-SNAPSHOT
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/hbase-annotations/mail-lists.html
--
diff --git a/hbase-annotations/mail-lists.html b/hbase-annotations/mail-lists.html
index d2651e2..63d0631 100644
--- a/hbase-annotations/mail-lists.html
+++ b/hbase-annotations/mail-lists.html
@@ -1,5 +1,5 @@
-
+
@@ -10,7 +10,7 @@
-
+
@@ -27,7 +27,7 @@
-Last Published: 2017-05-12
+Last Published: 2017-05-13
   | Version: 2.0.0-SNAPSHOT
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/hbase-annotations/plugin-management.html
--
diff --git a/hbase-annotations/plugin-management.html b/hbase-annotations/plugin-management.html
index a47dd4c..1b96060 100644
--- a/hbase-annotations/plugin-management.html
+++ b/hbase-annotations/plugin-management.html
@@ -1,5 +1,5 @@
-
+
@@ -10,7 +10,7 @@

[08/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/testdevapidocs/org/apache/hadoop/hbase/backup/TestFullBackupWithFailures.FullTableBackupClientForTest.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/backup/TestFullBackupWithFailures.FullTableBackupClientForTest.html
 
b/testdevapidocs/org/apache/hadoop/hbase/backup/TestFullBackupWithFailures.FullTableBackupClientForTest.html
new file mode 100644
index 000..c0e92e4
--- /dev/null
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/backup/TestFullBackupWithFailures.FullTableBackupClientForTest.html
@@ -0,0 +1,406 @@
+TestFullBackupWithFailures.FullTableBackupClientForTest (Apache HBase 2.0.0-SNAPSHOT Test API)
+
+org.apache.hadoop.hbase.backup
+Class TestFullBackupWithFailures.FullTableBackupClientForTest
+
+java.lang.Object
+  org.apache.hadoop.hbase.backup.impl.TableBackupClient
+    org.apache.hadoop.hbase.backup.impl.FullTableBackupClient
+      org.apache.hadoop.hbase.backup.TestFullBackupWithFailures.FullTableBackupClientForTest
+
+Enclosing class: TestFullBackupWithFailures
+
+static class TestFullBackupWithFailures.FullTableBackupClientForTest
+extends org.apache.hadoop.hbase.backup.impl.FullTableBackupClient
+
+Field Summary
+  static String BACKUP_TEST_MODE_STAGE
+  Fields inherited from class org.apache.hadoop.hbase.backup.impl.TableBackupClient:
+    BACKUP_CLIENT_IMPL_CLASS, backupId, backupInfo, backupManager, conf, conn, newTimestamps, tableList
+
+Constructor Summary
+  FullTableBackupClientForTest()
+  FullTableBackupClientForTest(org.apache.hadoop.hbase.client.Connection conn, String backupId,
+      org.apache.hadoop.hbase.backup.BackupRequest request)
+
+Method Summary
+  void execute()
+  protected void failStageIf(int stage)
+  protected int getTestStageId()
+
+Methods inherited from class org.apache.hadoop.hbase.backup.impl.FullTableBackupClient:
+  snapshotCopy, snapshotTable
+Methods inherited from class org.apache.hadoop.hbase.backup.impl.TableBackupClient:
+  addManifest, beginBackup, cleanupAndRestoreBackupSystem, cleanupDistCpLog, cleanupExportSnapshotLog,
+  cleanupTargetDir, completeBackup, deleteBackupTableSnapshot, deleteSnapshots, failBackup, getMessage,
+  init, obtainBackupMetaDataStr, restoreBackupTable, snapshotBackupTable, snapshotExists
+Methods inherited from class java.lang.Object:
+  clone, equals, finalize, getClass, ...
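
The new javadoc page above documents a test-only subclass that fails a full backup at a chosen stage. A guess at how such a failure-injection hook is usually wired; only the member names (BACKUP_TEST_MODE_STAGE, failStageIf, getTestStageId, execute) come from the page, while the property lookup and the exception logic are assumptions:

    import java.io.IOException;
    import java.util.Properties;

    public class FailureInjectingBackupClientSketch {
      // Name taken from the javadoc page above; the property semantics are an assumption.
      static final String BACKUP_TEST_MODE_STAGE = "backup.test.mode.stage";

      private final Properties conf;

      FailureInjectingBackupClientSketch(Properties conf) {
        this.conf = conf;
      }

      // Which stage the test asked us to fail at (0 = never).
      protected int getTestStageId() {
        return Integer.parseInt(conf.getProperty(BACKUP_TEST_MODE_STAGE, "0"));
      }

      // Throw if the current stage is the one selected for failure injection.
      protected void failStageIf(int stage) throws IOException {
        if (getTestStageId() == stage) {
          throw new IOException("Failing backup at test stage " + stage);
        }
      }

      public void execute() throws IOException {
        failStageIf(1);   // e.g. right after "begin backup"
        // ... a real client would snapshot and export tables here ...
        failStageIf(2);   // e.g. just before "complete backup"
      }

      public static void main(String[] args) throws IOException {
        Properties conf = new Properties();
        conf.setProperty(BACKUP_TEST_MODE_STAGE, "2");
        new FailureInjectingBackupClientSketch(conf).execute();   // throws at stage 2
      }
    }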

[37/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
index fb87256..3a87acd 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
@@ -41,521 +41,527 @@
 033import org.apache.hadoop.fs.Path;
 034import 
org.apache.hadoop.hbase.TableName;
 035import 
org.apache.hadoop.hbase.backup.BackupAdmin;
-036import 
org.apache.hadoop.hbase.backup.BackupInfo;
-037import 
org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
-038import 
org.apache.hadoop.hbase.backup.BackupRequest;
-039import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants;
-040import 
org.apache.hadoop.hbase.backup.BackupType;
-041import 
org.apache.hadoop.hbase.backup.HBackupFileSystem;
-042import 
org.apache.hadoop.hbase.backup.RestoreRequest;
-043import 
org.apache.hadoop.hbase.backup.util.BackupSet;
-044import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-045import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-046import 
org.apache.hadoop.hbase.client.Admin;
-047import 
org.apache.hadoop.hbase.client.Connection;
-048import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-049
-050import com.google.common.collect.Lists;
-051
-052@InterfaceAudience.Private
-053public class BackupAdminImpl implements 
BackupAdmin {
-054  public final static String CHECK_OK = 
"Checking backup images: OK";
-055  public final static String CHECK_FAILED 
=
-056  "Checking backup images: Failed. 
Some dependencies are missing for restore";
-057  private static final Log LOG = 
LogFactory.getLog(BackupAdminImpl.class);
-058
-059  private final Connection conn;
-060
-061  public BackupAdminImpl(Connection conn) 
{
-062this.conn = conn;
-063  }
-064
-065  @Override
-066  public void close() throws IOException 
{
-067if (conn != null) {
-068  conn.close();
-069}
-070  }
-071
+036import 
org.apache.hadoop.hbase.backup.BackupClientFactory;
+037import 
org.apache.hadoop.hbase.backup.BackupInfo;
+038import 
org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
+039import 
org.apache.hadoop.hbase.backup.BackupRequest;
+040import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants;
+041import 
org.apache.hadoop.hbase.backup.BackupType;
+042import 
org.apache.hadoop.hbase.backup.HBackupFileSystem;
+043import 
org.apache.hadoop.hbase.backup.RestoreRequest;
+044import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+045import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
+046import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+047import 
org.apache.hadoop.hbase.client.Admin;
+048import 
org.apache.hadoop.hbase.client.Connection;
+049import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+050
+051import com.google.common.collect.Lists;
+052
+053@InterfaceAudience.Private
+054public class BackupAdminImpl implements 
BackupAdmin {
+055  public final static String CHECK_OK = 
"Checking backup images: OK";
+056  public final static String CHECK_FAILED 
=
+057  "Checking backup images: Failed. 
Some dependencies are missing for restore";
+058  private static final Log LOG = 
LogFactory.getLog(BackupAdminImpl.class);
+059
+060  private final Connection conn;
+061
+062  public BackupAdminImpl(Connection conn) 
{
+063this.conn = conn;
+064  }
+065
+066  @Override
+067  public void close() throws IOException 
{
+068if (conn != null) {
+069  conn.close();
+070}
+071  }
 072
-073  @Override
-074  public BackupInfo getBackupInfo(String 
backupId) throws IOException {
-075BackupInfo backupInfo = null;
-076try (final BackupSystemTable table = 
new BackupSystemTable(conn)) {
-077  if (backupId == null) {
-078ArrayListBackupInfo 
recentSessions = table.getBackupInfos(BackupState.RUNNING);
-079if (recentSessions.isEmpty()) {
-080  LOG.warn("No ongoing sessions 
found.");
-081  return null;
-082}
-083// else show status for ongoing 
session
-084// must be one maximum
-085return recentSessions.get(0);
-086  } else {
-087backupInfo = 
table.readBackupInfo(backupId);
-088return backupInfo;
-089  }
-090}
-091  }
-092
-093  @Override
-094  public int deleteBackups(String[] 
backupIds) throws IOException {
-095// TODO: requires Fault tolerance 
support, failure will leave system
-096// in a non-consistent state
-097// see HBASE-15227
-098int totalDeleted = 0;
-099MapString, 
HashSetTableName allTablesMap = new HashMapString, 
HashSetTableName();
-100
-101try (final BackupSystemTable sysTable 
= new BackupSystemTable(conn)) {
-102  for (int 
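
The BackupAdminImpl hunk above shows getBackupInfo(): with a null backupId it returns the single RUNNING session read from the backup system table (at most one backup runs at a time), otherwise it reads that specific session. A small sketch of the same branching, with a hypothetical SessionStore standing in for BackupSystemTable:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    public class BackupLookupSketch {
      // Hypothetical stand-in for BackupSystemTable.
      interface SessionStore {
        List<String> runningSessions() throws IOException;
        String readSession(String backupId) throws IOException;
      }

      static String getBackupInfo(SessionStore store, String backupId) throws IOException {
        if (backupId == null) {
          List<String> running = store.runningSessions();
          if (running.isEmpty()) {
            System.out.println("No ongoing sessions found.");
            return null;
          }
          return running.get(0);   // "must be one maximum", per the original comment
        }
        return store.readSession(backupId);
      }

      public static void main(String[] args) throws IOException {
        SessionStore store = new SessionStore() {
          public List<String> runningSessions() { return Arrays.asList("backup_123"); }
          public String readSession(String backupId) { return backupId; }
        };
        System.out.println(getBackupInfo(store, null));          // backup_123
        System.out.println(getBackupInfo(store, "backup_456"));  // backup_456
      }
    }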

[01/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 80faeee01 -> 8e0a51670


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.html
new file mode 100644
index 000..b040a50
--- /dev/null
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.html
@@ -0,0 +1,252 @@
+001/*
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018
+019package 
org.apache.hadoop.hbase.master.balancer;
+020
+021import 
com.google.common.base.Preconditions;
+022import 
com.google.common.base.Stopwatch;
+023import 
org.apache.commons.cli.CommandLine;
+024import org.apache.commons.cli.Option;
+025import org.apache.commons.logging.Log;
+026import 
org.apache.commons.logging.LogFactory;
+027import 
org.apache.hadoop.hbase.HBaseCommonTestingUtility;
+028import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
+029import 
org.apache.hadoop.hbase.HConstants;
+030import 
org.apache.hadoop.hbase.HRegionInfo;
+031import 
org.apache.hadoop.hbase.ServerName;
+032import 
org.apache.hadoop.hbase.TableName;
+033import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+034import 
org.apache.hadoop.hbase.master.LoadBalancer;
+035import 
org.apache.hadoop.hbase.util.AbstractHBaseTool;
+036import 
org.apache.hadoop.hbase.util.Bytes;
+037
+038import java.io.IOException;
+039import java.util.ArrayList;
+040import java.util.Collections;
+041import java.util.HashMap;
+042import java.util.List;
+043import java.util.Map;
+044
+045/**
+046 * Tool to test performance of different 
{@link org.apache.hadoop.hbase.master.LoadBalancer}
+047 * implementations.
+048 * Example command:
+049 * $ bin/hbase 
org.apache.hadoop.hbase.master.balancer.LoadBalancerPerformanceEvaluation
+050 *   -regions 1000 -servers 100
+051 *   -load_balancer 
org.apache.hadoop.hbase.master.balancer.SimpleLoadBalancer
+052 */
+053@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
+054public class 
LoadBalancerPerformanceEvaluation extends AbstractHBaseTool {
+055  private static final Log LOG =
+056  
LogFactory.getLog(LoadBalancerPerformanceEvaluation.class.getName());
+057
+058  protected static final 
HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility();
+059
+060  private static final int 
DEFAULT_NUM_REGIONS = 100;
+061  private static Option NUM_REGIONS_OPT = 
new Option("regions", true,
+062  "Number of regions to consider by 
load balancer. Default: " + DEFAULT_NUM_REGIONS);
+063
+064  private static final int 
DEFAULT_NUM_SERVERS = 1000;
+065  private static Option NUM_SERVERS_OPT = 
new Option("servers", true,
+066  "Number of servers to consider by 
load balancer. Default: " + DEFAULT_NUM_SERVERS);
+067
+068  private static final String 
DEFAULT_LOAD_BALANCER =
+069  
"org.apache.hadoop.hbase.master.balancer.StochasticLoadBalancer";
+070  private static Option LOAD_BALANCER_OPT 
= new Option("load_balancer", true,
+071  "Type of Load Balancer to use. 
Default: " + DEFAULT_LOAD_BALANCER);
+072
+073  private int numRegions;
+074  private int numServers;
+075  private String loadBalancerType;
+076  private Class? 
loadBalancerClazz;
+077
+078  private LoadBalancer loadBalancer;
+079
+080  // data
+081  private ListServerName 
servers;
+082  private ListHRegionInfo 
regions;
+083  private MapHRegionInfo, 
ServerName regionServerMap;
+084  private MapServerName, 
ListHRegionInfo serverRegionMap;
+085
+086  // Non-default configurations.
+087  private void setupConf() {
+088
conf.setClass(HConstants.HBASE_MASTER_LOADBALANCER_CLASS, loadBalancerClazz, 
LoadBalancer.class);
+089loadBalancer = 
LoadBalancerFactory.getLoadBalancer(conf);
+090  }
+091
+092  private void 
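
LoadBalancerPerformanceEvaluation above is cut off before the part that builds the synthetic cluster and times the balancer, but its fields (servers, regions, regionServerMap, serverRegionMap) and the -regions/-servers options indicate the shape. A generic sketch of that kind of timing harness, with a hypothetical Balancer interface instead of the real org.apache.hadoop.hbase.master.LoadBalancer:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class BalancerTimingSketch {
      interface Balancer {
        void balance(Map<String, List<String>> regionsByServer);
      }

      // Build a synthetic assignment of numRegions regions across numServers servers.
      static Map<String, List<String>> syntheticCluster(int numServers, int numRegions) {
        Map<String, List<String>> state = new HashMap<>();
        for (int s = 0; s < numServers; s++) state.put("server-" + s, new ArrayList<>());
        for (int r = 0; r < numRegions; r++) {
          state.get("server-" + (r % numServers)).add("region-" + r);   // round-robin seed
        }
        return state;
      }

      static long timeMillis(Balancer balancer, Map<String, List<String>> cluster) {
        long start = System.nanoTime();
        balancer.balance(cluster);
        return (System.nanoTime() - start) / 1_000_000;
      }

      public static void main(String[] args) {
        Map<String, List<String>> cluster = syntheticCluster(100, 1000);
        long ms = timeMillis(state -> { /* no-op balancer */ }, cluster);
        System.out.println("balance took " + ms + " ms over " + cluster.size() + " servers");
      }
    }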

[30/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
index cc40d21..2df0b04 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html

[12/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
index f456b7b..a2510c5 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
@@ -69,15 +69,15 @@
 061  requiredArguments = {
 062@org.jamon.annotations.Argument(name 
= "master", type = "HMaster")},
 063  optionalArguments = {
-064@org.jamon.annotations.Argument(name 
= "metaLocation", type = "ServerName"),
-065@org.jamon.annotations.Argument(name 
= "frags", type = "MapString,Integer"),
-066@org.jamon.annotations.Argument(name 
= "filter", type = "String"),
-067@org.jamon.annotations.Argument(name 
= "serverManager", type = "ServerManager"),
-068@org.jamon.annotations.Argument(name 
= "servers", type = "ListServerName"),
-069@org.jamon.annotations.Argument(name 
= "assignmentManager", type = "AssignmentManager"),
-070@org.jamon.annotations.Argument(name 
= "catalogJanitorEnabled", type = "boolean"),
+064@org.jamon.annotations.Argument(name 
= "catalogJanitorEnabled", type = "boolean"),
+065@org.jamon.annotations.Argument(name 
= "metaLocation", type = "ServerName"),
+066@org.jamon.annotations.Argument(name 
= "frags", type = "MapString,Integer"),
+067@org.jamon.annotations.Argument(name 
= "filter", type = "String"),
+068@org.jamon.annotations.Argument(name 
= "serverManager", type = "ServerManager"),
+069@org.jamon.annotations.Argument(name 
= "servers", type = "ListServerName"),
+070@org.jamon.annotations.Argument(name 
= "format", type = "String"),
 071@org.jamon.annotations.Argument(name 
= "deadServers", type = "SetServerName"),
-072@org.jamon.annotations.Argument(name 
= "format", type = "String")})
+072@org.jamon.annotations.Argument(name 
= "assignmentManager", type = "AssignmentManager")})
 073public class MasterStatusTmpl
 074  extends 
org.jamon.AbstractTemplateProxy
 075{
@@ -118,125 +118,125 @@
 110  return m_master;
 111}
 112private HMaster m_master;
-113// 22, 1
-114public void 
setMetaLocation(ServerName metaLocation)
+113// 25, 1
+114public void 
setCatalogJanitorEnabled(boolean catalogJanitorEnabled)
 115{
-116  // 22, 1
-117  m_metaLocation = metaLocation;
-118  m_metaLocation__IsNotDefault = 
true;
+116  // 25, 1
+117  m_catalogJanitorEnabled = 
catalogJanitorEnabled;
+118  
m_catalogJanitorEnabled__IsNotDefault = true;
 119}
-120public ServerName getMetaLocation()
+120public boolean 
getCatalogJanitorEnabled()
 121{
-122  return m_metaLocation;
+122  return m_catalogJanitorEnabled;
 123}
-124private ServerName m_metaLocation;
-125public boolean 
getMetaLocation__IsNotDefault()
+124private boolean 
m_catalogJanitorEnabled;
+125public boolean 
getCatalogJanitorEnabled__IsNotDefault()
 126{
-127  return 
m_metaLocation__IsNotDefault;
+127  return 
m_catalogJanitorEnabled__IsNotDefault;
 128}
-129private boolean 
m_metaLocation__IsNotDefault;
-130// 21, 1
-131public void 
setFrags(MapString,Integer frags)
+129private boolean 
m_catalogJanitorEnabled__IsNotDefault;
+130// 22, 1
+131public void 
setMetaLocation(ServerName metaLocation)
 132{
-133  // 21, 1
-134  m_frags = frags;
-135  m_frags__IsNotDefault = true;
+133  // 22, 1
+134  m_metaLocation = metaLocation;
+135  m_metaLocation__IsNotDefault = 
true;
 136}
-137public MapString,Integer 
getFrags()
+137public ServerName getMetaLocation()
 138{
-139  return m_frags;
+139  return m_metaLocation;
 140}
-141private MapString,Integer 
m_frags;
-142public boolean 
getFrags__IsNotDefault()
+141private ServerName m_metaLocation;
+142public boolean 
getMetaLocation__IsNotDefault()
 143{
-144  return m_frags__IsNotDefault;
+144  return 
m_metaLocation__IsNotDefault;
 145}
-146private boolean 
m_frags__IsNotDefault;
-147// 26, 1
-148public void setFilter(String 
filter)
+146private boolean 
m_metaLocation__IsNotDefault;
+147// 21, 1
+148public void 
setFrags(MapString,Integer frags)
 149{
-150  // 26, 1
-151  m_filter = filter;
-152  m_filter__IsNotDefault = true;
+150  // 21, 1
+151  m_frags = frags;
+152  m_frags__IsNotDefault = true;
 153}
-154public String getFilter()
+154public MapString,Integer 
getFrags()
 155{
-156  return m_filter;
+156  return m_frags;
 157}
-158private String m_filter;
-159public boolean 
getFilter__IsNotDefault()
+158private 

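For readers skimming the Jamon diff above: the churn is only a reordering of the generated optional-argument holders; the underlying pattern is one setter plus one __IsNotDefault flag per argument, so the renderer can tell an explicitly supplied value from the default. A minimal hand-written sketch of that pattern (illustrative only, not the generated MasterStatusTmpl code; the field name and default value are invented):

// Sketch of the holder pattern Jamon generates for optional template arguments:
// the setter records the value and flips an "is set" flag.
public class OptionalArgSketch {
  private String filter = "general";           // assumed default, for illustration
  private boolean filterIsNotDefault = false;  // true once a caller sets it

  public void setFilter(String filter) {
    this.filter = filter;
    this.filterIsNotDefault = true;
  }

  public String getFilter() { return filter; }

  public boolean getFilterIsNotDefault() { return filterIsNotDefault; }

  public static void main(String[] args) {
    OptionalArgSketch tmpl = new OptionalArgSketch();
    System.out.println(tmpl.getFilterIsNotDefault()); // false -> default in use
    tmpl.setFilter("dead");
    System.out.println(tmpl.getFilterIsNotDefault()); // true  -> caller override
  }
}
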
[33/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
index cc40d21..2df0b04 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
@@ -61,8 +61,8 @@
 053import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 054import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
 055import 
org.apache.hadoop.hbase.backup.BackupType;
-056import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-057import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+056import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+057import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 058import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 059import 
org.apache.hadoop.hbase.client.Connection;
 060import 
org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -122,670 +122,703 @@
 114
 115  public static abstract class Command 
extends Configured {
 116CommandLine cmdline;
-117
+117Connection conn;
 118Command(Configuration conf) {
-119  super(conf);
-120}
-121
-122public void execute() throws 
IOException {
-123  if (cmdline.hasOption("h") || 
cmdline.hasOption("help")) {
-124printUsage();
-125throw new 
IOException(INCORRECT_USAGE);
-126  }
-127}
-128
-129protected abstract void 
printUsage();
-130  }
-131
-132  private BackupCommands() {
-133throw new 
AssertionError("Instantiating utility class...");
-134  }
-135
-136  public static Command 
createCommand(Configuration conf, BackupCommand type, CommandLine cmdline) {
-137Command cmd = null;
-138switch (type) {
-139case CREATE:
-140  cmd = new CreateCommand(conf, 
cmdline);
-141  break;
-142case DESCRIBE:
-143  cmd = new DescribeCommand(conf, 
cmdline);
-144  break;
-145case PROGRESS:
-146  cmd = new ProgressCommand(conf, 
cmdline);
-147  break;
-148case DELETE:
-149  cmd = new DeleteCommand(conf, 
cmdline);
-150  break;
-151case CANCEL:
-152  cmd = new CancelCommand(conf, 
cmdline);
-153  break;
-154case HISTORY:
-155  cmd = new HistoryCommand(conf, 
cmdline);
-156  break;
-157case SET:
-158  cmd = new BackupSetCommand(conf, 
cmdline);
-159  break;
-160case HELP:
-161default:
-162  cmd = new HelpCommand(conf, 
cmdline);
-163  break;
-164}
-165return cmd;
-166  }
-167
-168  static int numOfArgs(String[] args) {
-169if (args == null) return 0;
-170return args.length;
-171  }
-172
-173  public static class CreateCommand 
extends Command {
-174
-175CreateCommand(Configuration conf, 
CommandLine cmdline) {
-176  super(conf);
-177  this.cmdline = cmdline;
-178}
-179
-180@Override
-181public void execute() throws 
IOException {
-182  super.execute();
-183  if (cmdline == null || 
cmdline.getArgs() == null) {
-184printUsage();
-185throw new 
IOException(INCORRECT_USAGE);
-186  }
-187  String[] args = 
cmdline.getArgs();
-188  if (args.length !=3) {
-189printUsage();
-190throw new 
IOException(INCORRECT_USAGE);
-191  }
-192
-193  if 
(!BackupType.FULL.toString().equalsIgnoreCase(args[1])
-194   && !BackupType.INCREMENTAL.toString().equalsIgnoreCase(args[1])) {
-195System.out.println("ERROR: 
invalid backup type: " + args[1]);
-196printUsage();
-197throw new 
IOException(INCORRECT_USAGE);
-198  }
-199  if (!verifyPath(args[2])) {
-200System.out.println("ERROR: 
invalid backup destination: " + args[2]);
-201printUsage();
-202throw new 
IOException(INCORRECT_USAGE);
-203  }
-204
-205  String tables = null;
-206  Configuration conf = getConf() != 
null ? getConf() : HBaseConfiguration.create();
-207
-208  // Check if we have both: backup 
set and list of tables
-209  if (cmdline.hasOption(OPTION_TABLE) && cmdline.hasOption(OPTION_SET)) {
-210System.out.println("ERROR: You 
can specify either backup set or list"
-211+ " of tables, but not 
both");
-212printUsage();
-213throw new 
IOException(INCORRECT_USAGE);
-214  }
-215
-216  // Check backup set
-217  String setName = null;
-218  if (cmdline.hasOption(OPTION_SET)) 
{
-219setName = 
cmdline.getOptionValue(OPTION_SET);
-220tables = getTablesForSet(setName, 
conf);
-221
-222if (tables == null) {
-223  System.out.println("ERROR: 
Backup set '" + setName
-224  + 

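The truncated BackupCommands diff above centers on createCommand, a plain switch-based factory from the BackupCommand enum to a concrete Command, with HELP as the fallback. A stripped-down sketch of that shape, using stand-in types rather than the real o.a.h.hbase.backup.impl classes:

import java.io.IOException;

// Stand-ins for the real Command hierarchy; only the factory shape is the point.
enum CmdType { CREATE, DELETE, HELP }

interface Command {
  void execute() throws IOException;
}

class CreateCmd implements Command {
  public void execute() { System.out.println("create backup"); }
}

class HelpCmd implements Command {
  public void execute() { System.out.println("usage: ..."); }
}

public class CommandFactorySketch {
  static Command createCommand(CmdType type) {
    switch (type) {
      case CREATE:
        return new CreateCmd();
      case HELP:
      default:        // unknown types fall back to help, as in the real factory
        return new HelpCmd();
    }
  }

  public static void main(String[] args) throws IOException {
    createCommand(CmdType.CREATE).execute();
  }
}
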
[50/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 36ba1e1..a2a4aef 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -286,10 +286,10 @@
 Warnings
 Errors
 
-2159
+2160
 0
 0
-14339
+14355
 
 Files
 
@@ -579,6471 +579,6476 @@
 0
 3
 
+org/apache/hadoop/hbase/backup/BackupClientFactory.java
+0
+0
+3
+
 org/apache/hadoop/hbase/backup/BackupCopyJob.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/BackupHFileCleaner.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/backup/BackupTableInfo.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/FailedArchiveException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/HBackupFileSystem.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/HFileArchiver.java
 0
 0
 21
-
+
 org/apache/hadoop/hbase/backup/LogUtils.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/RestoreDriver.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/RestoreJob.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/RestoreRequest.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/backup/example/ZKTableArchiveClient.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/backup/impl/BackupCommands.java
 0
 0
-55
-
+54
+
 org/apache/hadoop/hbase/backup/impl/BackupManager.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/backup/impl/BackupManifest.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
 0
 0
-38
-
+40
+
 org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java
 0
 0
-4
-
+5
+
 org/apache/hadoop/hbase/backup/impl/TableBackupClient.java
 0
 0
-1
-
+12
+
 org/apache/hadoop/hbase/backup/regionserver/LogRollBackupSubprocedure.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/backup/util/BackupUtils.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/backup/util/RestoreTool.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/classification/tools/IncludePublicAnnotationsStandardDoclet.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/classification/tools/RootDocProcessor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/classification/tools/StabilityOptions.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/AbstractResponse.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/Action.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/Admin.java
 0
 0
 78
-
+
 org/apache/hadoop/hbase/client/Append.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/AsyncAdmin.java
 0
 0
 15
-
+
 org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/AsyncClientScanner.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/AsyncConnection.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
 0
 0
 34
-
+
 org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/AsyncProcess.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/client/AsyncProcessTask.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncRequestFuture.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
 0
 0
 24
-
+
 org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/AsyncTableImpl.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/BatchErrors.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/BufferedMutator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/CancellableRegionServerCallable.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ClientIdGenerator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientScanner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ClientServiceCallable.java
 0
 0
 2
-
+
 

[32/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
index cc40d21..2df0b04 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
@@ -61,8 +61,8 @@
 053import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 054import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
 055import 
org.apache.hadoop.hbase.backup.BackupType;
-056import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-057import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+056import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+057import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 058import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 059import 
org.apache.hadoop.hbase.client.Connection;
 060import 
org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -122,670 +122,703 @@
 114
 115  public static abstract class Command 
extends Configured {
 116CommandLine cmdline;
-117
+117Connection conn;
 118Command(Configuration conf) {
-119  super(conf);
-120}
-121
-122public void execute() throws 
IOException {
-123  if (cmdline.hasOption("h") || 
cmdline.hasOption("help")) {
-124printUsage();
-125throw new 
IOException(INCORRECT_USAGE);
-126  }
-127}
-128
-129protected abstract void 
printUsage();
-130  }
-131
-132  private BackupCommands() {
-133throw new 
AssertionError("Instantiating utility class...");
-134  }
-135
-136  public static Command 
createCommand(Configuration conf, BackupCommand type, CommandLine cmdline) {
-137Command cmd = null;
-138switch (type) {
-139case CREATE:
-140  cmd = new CreateCommand(conf, 
cmdline);
-141  break;
-142case DESCRIBE:
-143  cmd = new DescribeCommand(conf, 
cmdline);
-144  break;
-145case PROGRESS:
-146  cmd = new ProgressCommand(conf, 
cmdline);
-147  break;
-148case DELETE:
-149  cmd = new DeleteCommand(conf, 
cmdline);
-150  break;
-151case CANCEL:
-152  cmd = new CancelCommand(conf, 
cmdline);
-153  break;
-154case HISTORY:
-155  cmd = new HistoryCommand(conf, 
cmdline);
-156  break;
-157case SET:
-158  cmd = new BackupSetCommand(conf, 
cmdline);
-159  break;
-160case HELP:
-161default:
-162  cmd = new HelpCommand(conf, 
cmdline);
-163  break;
-164}
-165return cmd;
-166  }
-167
-168  static int numOfArgs(String[] args) {
-169if (args == null) return 0;
-170return args.length;
-171  }
-172
-173  public static class CreateCommand 
extends Command {
-174
-175CreateCommand(Configuration conf, 
CommandLine cmdline) {
-176  super(conf);
-177  this.cmdline = cmdline;
-178}
-179
-180@Override
-181public void execute() throws 
IOException {
-182  super.execute();
-183  if (cmdline == null || 
cmdline.getArgs() == null) {
-184printUsage();
-185throw new 
IOException(INCORRECT_USAGE);
-186  }
-187  String[] args = 
cmdline.getArgs();
-188  if (args.length !=3) {
-189printUsage();
-190throw new 
IOException(INCORRECT_USAGE);
-191  }
-192
-193  if 
(!BackupType.FULL.toString().equalsIgnoreCase(args[1])
-194   && !BackupType.INCREMENTAL.toString().equalsIgnoreCase(args[1])) {
-195System.out.println("ERROR: 
invalid backup type: " + args[1]);
-196printUsage();
-197throw new 
IOException(INCORRECT_USAGE);
-198  }
-199  if (!verifyPath(args[2])) {
-200System.out.println("ERROR: 
invalid backup destination: " + args[2]);
-201printUsage();
-202throw new 
IOException(INCORRECT_USAGE);
-203  }
-204
-205  String tables = null;
-206  Configuration conf = getConf() != 
null ? getConf() : HBaseConfiguration.create();
-207
-208  // Check if we have both: backup 
set and list of tables
-209  if (cmdline.hasOption(OPTION_TABLE) && cmdline.hasOption(OPTION_SET)) {
-210System.out.println("ERROR: You 
can specify either backup set or list"
-211+ " of tables, but not 
both");
-212printUsage();
-213throw new 
IOException(INCORRECT_USAGE);
-214  }
-215
-216  // Check backup set
-217  String setName = null;
-218  if (cmdline.hasOption(OPTION_SET)) 
{
-219setName = 
cmdline.getOptionValue(OPTION_SET);
-220tables = getTablesForSet(setName, 
conf);
-221
-222if (tables == null) {
-223  System.out.println("ERROR: 
Backup set '" + setName
-224  + 

[42/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
index db108da..309c03d 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
@@ -175,16 +175,21 @@ service.
 
 
 
-private void
+protected static boolean
+TableBackupClient.snapshotExists(Admin admin,
+  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringsnapshotName)
+
+
+protected void
 FullTableBackupClient.snapshotTable(Admin admin,
  TableName tableName,
  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringsnapshotName)
 
-
+
 private void
 BackupSystemTable.verifyNamespaceExists(Admin admin)
 
-
+
 private void
 BackupSystemTable.waitForSystemTable(Admin admin)
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
index 2ce743e..8863c9a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
@@ -756,6 +756,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+static TableBackupClient
+BackupClientFactory.create(Connection conn,
+  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringbackupId,
+  BackupRequestrequest)
+
+
 private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 RestoreDriver.getTablesForSet(Connection conn,
http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
@@ -801,14 +807,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 TableBackupClient.conn
 
 
+(package private) Connection
+BackupCommands.Command.conn
+
+
 protected Connection
 BackupManager.conn
 
-
+
 private Connection
 BackupAdminImpl.conn
 
-
+
 private Connection
 BackupSystemTable.connection
 Connection to HBase cluster, shared among all 
instances
@@ -837,6 +847,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+static void
+TableBackupClient.cleanupAndRestoreBackupSystem(Connectionconn,
+ BackupInfobackupInfo,
+ 
org.apache.hadoop.conf.Configurationconf)
+
+
 protected void
 TableBackupClient.completeBackup(Connectionconn,
   BackupInfobackupInfo,
@@ -846,11 +862,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Complete the overall backup.
 
 
+
+protected static void
+TableBackupClient.deleteBackupTableSnapshot(Connection conn,
+ org.apache.hadoop.conf.Configuration conf)
+
 
-private void
-TableBackupClient.deleteSnapshot(Connection conn,
-  BackupInfo backupInfo,
-  org.apache.hadoop.conf.Configuration conf)
+protected static void
+TableBackupClient.deleteSnapshots(Connection conn,
+   BackupInfo backupInfo,
+   org.apache.hadoop.conf.Configuration conf)
 Delete HBase snapshot for backup.
 
 
@@ -867,7 +888,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-private boolean
+void
+TableBackupClient.init(Connection conn,
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String backupId,
+BackupRequest request)
+
+
+protected static void
+TableBackupClient.restoreBackupTable(Connection conn,
+  org.apache.hadoop.conf.Configuration conf)
+
+
+protected boolean
IncrementalTableBackupClient.tableExists(TableName table,
Connection conn)
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
index bd402e1..57785e9 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
@@ -352,11 +352,19 @@ service.
 
 
 

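The class-use tables above document the new entry point: BackupClientFactory.create(Connection, String, BackupRequest) returns a TableBackupClient, replacing direct construction of the full/incremental clients. A hedged usage sketch; only the signatures visible in the diff are assumed, and the exact package of BackupClientFactory (o.a.h.hbase.backup, per the checkstyle listing) plus any checked exceptions it declares are assumptions:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.backup.BackupClientFactory;
import org.apache.hadoop.hbase.backup.BackupRequest;
import org.apache.hadoop.hbase.backup.impl.TableBackupClient;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class BackupClientFactorySketch {
  // Assumes the caller already built a BackupRequest; constructing one is out of scope here.
  static void runBackup(String backupId, BackupRequest request) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf)) {
      // The factory decides between the full and incremental client for us.
      TableBackupClient client = BackupClientFactory.create(conn, backupId, request);
      client.execute();
    }
  }
}
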
hbase-site git commit: INFRA-10751 Empty commit

2017-05-13 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 8e0a51670 -> b905d2402


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/b905d240
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/b905d240
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/b905d240

Branch: refs/heads/asf-site
Commit: b905d24020239857b5982921b28f7f7a6406b8eb
Parents: 8e0a516
Author: jenkins 
Authored: Sat May 13 14:58:46 2017 +
Committer: jenkins 
Committed: Sat May 13 14:58:46 2017 +

--

--




[44/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
index c0d585e..df5f3a5 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
@@ -146,7 +146,7 @@ extends TableBackupClient
-backupId,
 backupInfo,
 backupManager,
 conf,
 conn,
 newTimestamps,
 tableList
+BACKUP_CLIENT_IMPL_CLASS,
 backupId,
 backupInfo,
 backupManager,
 conf,
 conn,
 newTimestamps,
 tableList
 
 
 
@@ -162,6 +162,9 @@ extends Constructor and Description
 
 
+FullTableBackupClient()
+
+
 FullTableBackupClient(Connectionconn,
  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringbackupId,
  BackupRequestrequest)
@@ -188,13 +191,13 @@ extends 
-private void
+protected void
 snapshotCopy(BackupInfobackupInfo)
 Do snapshot copy.
 
 
 
-private void
+protected void
 snapshotTable(Adminadmin,
  TableNametableName,
  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringsnapshotName)
@@ -205,7 +208,7 @@ extends TableBackupClient
-beginBackup,
 completeBackup,
 failBackup
+addManifest,
 beginBackup,
 cleanupAndRestoreBackupSystem,
 cleanupDistCpLog,
 cleanupExportSnapshotLog,
 cleanupTargetDir,
 completeBackup,
 deleteBackupTableSnapshot,
 deleteSnapshots,
 failBackup,
 getMessage,
 init,
 obtainBackupMetaDataStr, restoreBackupTable,
 snapshotBackupTable,
 snapshotExists
 
 
 
@@ -245,13 +248,22 @@ extends 
+
+
+
+
+FullTableBackupClient
+public FullTableBackupClient()
+
+
 
 
 
 
 
 FullTableBackupClient
-public FullTableBackupClient(Connection conn,
+public FullTableBackupClient(Connection conn,
  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringbackupId,
  BackupRequestrequest)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -275,8 +287,8 @@ extends 
 
 snapshotCopy
-private void snapshotCopy(BackupInfo backupInfo)
-   throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
+protected void snapshotCopy(BackupInfo backupInfo)
+ throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 Do snapshot copy.
 
 Parameters:
@@ -292,7 +304,7 @@ extends 
 
 execute
-public void execute()
+public void execute()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Backup request execution
 
@@ -309,10 +321,10 @@ extends 
 
 snapshotTable
-private void snapshotTable(Admin admin,
-   TableName tableName,
-   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringsnapshotName)
-throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+protected void snapshotTable(Admin admin,
+ TableName tableName,
+ http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringsnapshotName)
+  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
 http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
index 273662b..aaceb40 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
@@ -257,7 +257,7 @@ extends BackupManager
-addIncrementalBackupTableSet,
 close,
 

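snapshotCopy and snapshotTable moving from private to protected, together with the new no-arg constructor, make FullTableBackupClient subclassable; one plausible use is fault injection in tests. A hypothetical sketch (not code from this patch; it relies only on the protected snapshotTable signature and the constructor shown above):

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.impl.FullTableBackupClient;
import org.apache.hadoop.hbase.client.Admin;

public class FailingFullBackupClient extends FullTableBackupClient {
  private int snapshotsTaken = 0;

  public FailingFullBackupClient() {
    super(); // the new public no-arg constructor from the diff
  }

  @Override
  protected void snapshotTable(Admin admin, TableName tableName, String snapshotName)
      throws IOException {
    // Fail the second snapshot to exercise the cleanup/rollback path.
    if (++snapshotsTaken == 2) {
      throw new IOException("injected snapshot failure for " + tableName);
    }
    super.snapshotTable(admin, tableName, snapshotName);
  }
}
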
[22/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
index d6d06f8..4e40739 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
@@ -48,347 +48,433 @@
 040import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 041import 
org.apache.hadoop.hbase.client.Admin;
 042import 
org.apache.hadoop.hbase.client.Connection;
-043import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-044import 
org.apache.hadoop.hbase.util.FSUtils;
-045
-046/**
-047 * Base class for backup operation. 
Concrete implementation for
-048 * full and incremental backup are 
delegated to corresponding sub-classes:
-049 * {@link FullTableBackupClient} and 
{@link IncrementalTableBackupClient}
-050 *
-051 */
-052@InterfaceAudience.Private
-053public abstract class TableBackupClient 
{
-054  private static final Log LOG = 
LogFactory.getLog(TableBackupClient.class);
+043import 
org.apache.hadoop.hbase.client.SnapshotDescription;
+044import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+045import 
org.apache.hadoop.hbase.util.FSUtils;
+046
+047/**
+048 * Base class for backup operation. 
Concrete implementation for
+049 * full and incremental backup are 
delegated to corresponding sub-classes:
+050 * {@link FullTableBackupClient} and 
{@link IncrementalTableBackupClient}
+051 *
+052 */
+053@InterfaceAudience.Private
+054public abstract class TableBackupClient 
{
 055
-056  protected Configuration conf;
-057  protected Connection conn;
-058  protected String backupId;
-059  protected List<TableName> tableList;
-060  protected HashMap<String, Long> newTimestamps = null;
-061
-062  protected BackupManager 
backupManager;
-063  protected BackupInfo backupInfo;
-064
-065  public TableBackupClient(final 
Connection conn, final String backupId, BackupRequest request)
-066  throws IOException {
-067if (request.getBackupType() == 
BackupType.FULL) {
-068  backupManager = new 
BackupManager(conn, conn.getConfiguration());
-069} else {
-070  backupManager = new 
IncrementalBackupManager(conn, conn.getConfiguration());
-071}
-072this.backupId = backupId;
-073this.tableList = 
request.getTableList();
-074this.conn = conn;
-075this.conf = 
conn.getConfiguration();
-076backupInfo =
-077
backupManager.createBackupInfo(backupId, request.getBackupType(), tableList,
-078  request.getTargetRootDir(), 
request.getTotalTasks(), request.getBandwidth());
-079if (tableList == null || 
tableList.isEmpty()) {
-080  this.tableList = new 
ArrayList<>(backupInfo.getTables());
-081}
-082  }
-083
-084  /**
-085   * Begin the overall backup.
-086   * @param backupInfo backup info
-087   * @throws IOException exception
-088   */
-089  protected void 
beginBackup(BackupManager backupManager, BackupInfo backupInfo)
-090  throws IOException {
-091
backupManager.setBackupInfo(backupInfo);
-092// set the start timestamp of the 
overall backup
-093long startTs = 
EnvironmentEdgeManager.currentTime();
-094backupInfo.setStartTs(startTs);
-095// set overall backup status: 
ongoing
-096
backupInfo.setState(BackupState.RUNNING);
-097
backupInfo.setPhase(BackupPhase.REQUEST);
-098LOG.info("Backup " + 
backupInfo.getBackupId() + " started at " + startTs + ".");
-099
-100
backupManager.updateBackupInfo(backupInfo);
-101if (LOG.isDebugEnabled()) {
-102  LOG.debug("Backup session " + 
backupInfo.getBackupId() + " has been started.");
-103}
-104  }
-105
-106  private String getMessage(Exception e) 
{
-107String msg = e.getMessage();
-108if (msg == null || msg.equals("")) 
{
-109  msg = e.getClass().getName();
-110}
-111return msg;
-112  }
-113
-114  /**
-115   * Delete HBase snapshot for backup.
-116   * @param backupInfo backup info
-117   * @throws Exception exception
-118   */
-119  private void deleteSnapshot(final 
Connection conn, BackupInfo backupInfo, Configuration conf)
-120  throws IOException {
-121LOG.debug("Trying to delete snapshot 
for full backup.");
-122for (String snapshotName : 
backupInfo.getSnapshotNames()) {
-123  if (snapshotName == null) {
-124continue;
-125  }
-126  LOG.debug("Trying to delete 
snapshot: " + snapshotName);
-127
-128  try (Admin admin = 
conn.getAdmin();) {
-129
admin.deleteSnapshot(snapshotName);
-130  } catch (IOException ioe) {
-131LOG.debug("when deleting snapshot 
" + snapshotName, ioe);
-132  }
-133  LOG.debug("Deleting the snapshot " 
+ snapshotName + " for backup " + 

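The deleteSnapshot body visible on the removed side of this diff is a best-effort cleanup loop: open an Admin, attempt each delete, and log failures instead of propagating them. A generic restatement of that pattern (the method name and logger are placeholders, not the refactored deleteSnapshots from the patch):

import java.io.IOException;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;

public class SnapshotCleanupSketch {
  private static final Log LOG = LogFactory.getLog(SnapshotCleanupSketch.class);

  // Best effort: a leftover snapshot is recoverable, so one failed delete never
  // aborts the remaining deletes.
  static void deleteSnapshots(Connection conn, List<String> snapshotNames) {
    for (String snapshotName : snapshotNames) {
      if (snapshotName == null) continue;
      try (Admin admin = conn.getAdmin()) {
        admin.deleteSnapshot(snapshotName);
        LOG.debug("Deleted snapshot " + snapshotName);
      } catch (IOException ioe) {
        LOG.warn("Failed to delete snapshot " + snapshotName, ioe);
      }
    }
  }
}
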
[03/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.MockMapping.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.MockMapping.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.MockMapping.html
index fb4fa30..d501058 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.MockMapping.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.MockMapping.html
@@ -29,660 +29,639 @@
 021import static 
org.junit.Assert.assertNull;
 022import static 
org.junit.Assert.assertTrue;
 023
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.HashMap;
-028import java.util.HashSet;
-029import java.util.LinkedList;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Map.Entry;
-033import java.util.Queue;
-034import java.util.Random;
-035import java.util.Set;
-036import java.util.SortedSet;
-037import java.util.TreeMap;
-038import java.util.TreeSet;
-039
-040import com.google.protobuf.Service;
-041import org.apache.commons.logging.Log;
-042import 
org.apache.commons.logging.LogFactory;
-043import 
org.apache.hadoop.conf.Configuration;
-044import 
org.apache.hadoop.hbase.ChoreService;
-045import 
org.apache.hadoop.hbase.CoordinatedStateManager;
-046import 
org.apache.hadoop.hbase.HBaseConfiguration;
-047import 
org.apache.hadoop.hbase.HColumnDescriptor;
-048import 
org.apache.hadoop.hbase.HRegionInfo;
-049import 
org.apache.hadoop.hbase.HTableDescriptor;
-050import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-051import 
org.apache.hadoop.hbase.ProcedureInfo;
-052import 
org.apache.hadoop.hbase.ServerName;
-053import 
org.apache.hadoop.hbase.TableDescriptors;
-054import 
org.apache.hadoop.hbase.TableName;
-055import 
org.apache.hadoop.hbase.TableNotDisabledException;
-056import 
org.apache.hadoop.hbase.TableNotFoundException;
-057import 
org.apache.hadoop.hbase.client.ClusterConnection;
-058import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-059import 
org.apache.hadoop.hbase.master.RackManager;
-060import 
org.apache.hadoop.hbase.master.RegionPlan;
-061import 
org.apache.hadoop.hbase.executor.ExecutorService;
-062import 
org.apache.hadoop.hbase.master.*;
-063import 
org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-064import 
org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
-065import 
org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-066import 
org.apache.hadoop.hbase.quotas.MasterQuotaManager;
-067import 
org.apache.hadoop.hbase.security.User;
-068import 
org.apache.hadoop.hbase.util.Bytes;
-069import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-070import 
org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-071import 
org.apache.hadoop.net.DNSToSwitchMapping;
-072import org.junit.Assert;
-073import org.junit.BeforeClass;
-074
-075/**
-076 * Class used to be the base of unit 
tests on load balancers. It gives helper
-077 * methods to create maps of {@link 
ServerName} to lists of {@link HRegionInfo}
-078 * and to check list of region plans.
-079 *
-080 */
-081public class BalancerTestBase {
-082  private static final Log LOG = 
LogFactory.getLog(BalancerTestBase.class);
-083  protected static Random rand = new 
Random();
-084  static int regionId = 0;
-085  protected static Configuration conf;
-086  protected static StochasticLoadBalancer 
loadBalancer;
-087
-088  @BeforeClass
-089  public static void beforeAllTests() 
throws Exception {
-090conf = HBaseConfiguration.create();
-091
conf.setClass("hbase.util.ip.to.rack.determiner", MockMapping.class, 
DNSToSwitchMapping.class);
-092
conf.setFloat("hbase.master.balancer.stochastic.maxMovePercent", 0.75f);
-093conf.setFloat("hbase.regions.slop", 
0.0f);
-094
conf.setFloat("hbase.master.balancer.stochastic.localityCost", 0);
-095loadBalancer = new 
StochasticLoadBalancer();
-096loadBalancer.setConf(conf);
-097  }
-098
-099  protected int[] largeCluster = new 
int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-100  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-101  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-102  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-103  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-104  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-105  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-106  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 

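Per its (unchanged) javadoc, BalancerTestBase exists to build maps of ServerName to lists of HRegionInfo and to check the resulting region plans. A small sketch of constructing such a mock cluster state by hand; the host names, port, and table name are invented for the example:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;

public class MockClusterStateSketch {
  // Three fake servers, each hosting regionsPerServer contiguous regions of one table.
  static Map<ServerName, List<HRegionInfo>> mockCluster(int regionsPerServer) {
    TableName table = TableName.valueOf("test");
    Map<ServerName, List<HRegionInfo>> clusterState = new TreeMap<>();
    for (int s = 0; s < 3; s++) {
      ServerName server = ServerName.valueOf("host-" + s, 16020, 1000 + s);
      List<HRegionInfo> regions = new ArrayList<>();
      for (int r = 0; r < regionsPerServer; r++) {
        byte[] start = Bytes.toBytes(s + "-" + r);
        byte[] end = Bytes.toBytes(s + "-" + (r + 1));
        regions.add(new HRegionInfo(table, start, end));
      }
      clusterState.put(server, regions);
    }
    return clusterState;
  }
}
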
[41/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
index eaaefa5..0565f36 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static class BaseLoadBalancer.Cluster
+protected static class BaseLoadBalancer.Cluster
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 An efficient array based implementation similar to 
ClusterState for keeping
  the status of the cluster in terms of region assignment and distribution.
@@ -539,7 +539,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 servers
-ServerName[] servers
+ServerName[] servers
 
 
 
@@ -548,7 +548,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 hosts
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[] hosts
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[] hosts
 
 
 
@@ -557,7 +557,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 racks
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[] racks
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[] racks
 
 
 
@@ -566,7 +566,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 multiServersPerHost
-boolean multiServersPerHost
+boolean multiServersPerHost
 
 
 
@@ -575,7 +575,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 tables
-http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in java.util">ArrayListhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String tables
+http://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in java.util">ArrayListhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String tables
 
 
 
@@ -584,7 +584,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 regions
-HRegionInfo[] regions
+HRegionInfo[] regions
 
 
 
@@ -593,7 +593,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 regionLoads
-http://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true;
 title="class or interface in java.util">DequeBalancerRegionLoad[] regionLoads
+http://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true;
 title="class or interface in java.util">DequeBalancerRegionLoad[] regionLoads
 
 
 
@@ -602,7 +602,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 regionFinder
-privateRegionLocationFinder regionFinder
+privateRegionLocationFinder regionFinder
 
 
 
@@ -611,7 +611,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 regionLocations
-int[][] regionLocations
+int[][] regionLocations
 
 
 
@@ -620,7 +620,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 serverIndexToHostIndex
-int[] serverIndexToHostIndex
+int[] serverIndexToHostIndex
 
 
 
@@ -629,7 +629,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 serverIndexToRackIndex
-int[] serverIndexToRackIndex
+int[] serverIndexToRackIndex
 
 
 
@@ -638,7 +638,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 regionsPerServer
-int[][] regionsPerServer
+int[][] regionsPerServer
 
 
 
@@ -647,7 +647,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 regionsPerHost
-int[][] regionsPerHost
+int[][] regionsPerHost
 
 
 
@@ -656,7 +656,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 regionsPerRack
-int[][] regionsPerRack
+int[][] regionsPerRack
 
 
 
@@ -665,7 +665,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 primariesOfRegionsPerServer
-int[][] primariesOfRegionsPerServer
+int[][] primariesOfRegionsPerServer
 
 
 
@@ -674,7 +674,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 primariesOfRegionsPerHost
-int[][] 

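The field listing above is dominated by int[] and int[][] members because Cluster encodes assignment with dense integer indices instead of per-region hashmap lookups. A toy illustration of that encoding, independent of the HBase classes:

import java.util.Arrays;

public class IndexedAssignmentSketch {
  public static void main(String[] args) {
    // Dense ids: position in the array is the id.
    String[] servers = { "rs1", "rs2" };
    String[] regions = { "r0", "r1", "r2" };

    // regionsPerServer[serverIdx] -> region indices hosted on that server
    int[][] regionsPerServer = { { 0, 2 }, { 1 } };
    // regionToServer[regionIdx] -> index of the hosting server
    int[] regionToServer = { 0, 1, 0 };

    int region = 2;
    int server = regionToServer[region];
    System.out.println(regions[region] + " is on " + servers[server]
        + ", which hosts regions " + Arrays.toString(regionsPerServer[server]));
  }
}
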
[16/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
index 7e37ca0..79c65e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
@@ -70,1527 +70,1525 @@
 062import com.google.common.collect.Sets;
 063
 064/**
-065 * The base class for load balancers. It 
provides the the functions used to by
-066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions
-067 * in the edge cases. It doesn't provide 
an implementation of the
-068 * actual balancing algorithm.
-069 *
-070 */
-071public abstract class BaseLoadBalancer 
implements LoadBalancer {
-072  protected static final int 
MIN_SERVER_BALANCE = 2;
-073  private volatile boolean stopped = 
false;
+065 * The base class for load balancers. It 
provides functions used by
+066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions in the edge 
cases.
+067 * It doesn't provide an implementation 
of the actual balancing algorithm.
+068 */
+069public abstract class BaseLoadBalancer 
implements LoadBalancer {
+070  protected static final int 
MIN_SERVER_BALANCE = 2;
+071  private volatile boolean stopped = 
false;
+072
+073  private static final 
List<HRegionInfo> EMPTY_REGION_LIST = new ArrayList<>(0);
 074
-075  private static final 
List<HRegionInfo> EMPTY_REGION_LIST = new ArrayList<>(0);
-076
-077  static final 
Predicate<ServerLoad> IDLE_SERVER_PREDICATOR
-078= load ->
load.getNumberOfRegions() == 0;
+075  static final 
Predicate<ServerLoad> IDLE_SERVER_PREDICATOR
+076= load ->
load.getNumberOfRegions() == 0;
+077
+078  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
 079
-080  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-081
-082  private static class DefaultRackManager 
extends RackManager {
-083@Override
-084public String getRack(ServerName 
server) {
-085  return UNKNOWN_RACK;
-086}
-087  }
-088
-089  /**
-090   * The constructor that uses the basic 
MetricsBalancer
-091   */
-092  protected BaseLoadBalancer() {
-093metricsBalancer = new 
MetricsBalancer();
-094  }
-095
-096  /**
-097   * This Constructor accepts an instance 
of MetricsBalancer,
-098   * which will be used instead of 
creating a new one
-099   */
-100  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-101this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-102  }
-103
-104  /**
-105   * An efficient array based 
implementation similar to ClusterState for keeping
-106   * the status of the cluster in terms 
of region assignment and distribution.
-107   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-108   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-109   * class uses mostly indexes and 
arrays.
-110   *
-111   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-112   * topology in terms of server names, 
hostnames and racks.
-113   */
-114  protected static class Cluster {
-115ServerName[] servers;
-116String[] hosts; // ServerName 
uniquely identifies a region server. multiple RS can run on the same host
-117String[] racks;
-118boolean multiServersPerHost = false; 
// whether or not any host has more than one server
-119
-120ArrayList<String> tables;
-121HRegionInfo[] regions;
-122Deque<BalancerRegionLoad>[] regionLoads;
-123private RegionLocationFinder 
regionFinder;
+080  private static class DefaultRackManager 
extends RackManager {
+081@Override
+082public String getRack(ServerName 
server) {
+083  return UNKNOWN_RACK;
+084}
+085  }
+086
+087  /**
+088   * The constructor that uses the basic 
MetricsBalancer
+089   */
+090  protected BaseLoadBalancer() {
+091metricsBalancer = new 
MetricsBalancer();
+092  }
+093
+094  /**
+095   * This Constructor accepts an instance 
of MetricsBalancer,
+096   * which will be used instead of 
creating a new one
+097   */
+098  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
+099this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
+100  }
+101
+102  /**
+103   * An efficient array based 
implementation similar to ClusterState for keeping
+104   * the status of the cluster in terms 
of region assignment and distribution.
+105   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
+106   * hundreds of thousands 

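IDLE_SERVER_PREDICATOR, restored above, is nothing more than a java.util.function.Predicate lambda over the server's region count. The same idiom spelled out on a stand-in type so it compiles without HBase on the classpath (Load here is not the real ServerLoad):

import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;

public class IdleServerPredicateSketch {
  // Stand-in for ServerLoad exposing only what the predicate needs.
  static class Load {
    private final int numberOfRegions;
    Load(int n) { this.numberOfRegions = n; }
    int getNumberOfRegions() { return numberOfRegions; }
  }

  static final Predicate<Load> IDLE_SERVER_PREDICATOR =
      load -> load.getNumberOfRegions() == 0;

  public static void main(String[] args) {
    List<Load> loads = Arrays.asList(new Load(0), new Load(12));
    for (Load l : loads) {
      System.out.println(l.getNumberOfRegions() + " regions, idle="
          + IDLE_SERVER_PREDICATOR.test(l));
    }
  }
}
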
[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
index 7e37ca0..79c65e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
@@ -70,1527 +70,1525 @@
 062import com.google.common.collect.Sets;
 063
 064/**
-065 * The base class for load balancers. It 
provides the the functions used to by
-066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions
-067 * in the edge cases. It doesn't provide 
an implementation of the
-068 * actual balancing algorithm.
-069 *
-070 */
-071public abstract class BaseLoadBalancer 
implements LoadBalancer {
-072  protected static final int 
MIN_SERVER_BALANCE = 2;
-073  private volatile boolean stopped = 
false;
+065 * The base class for load balancers. It 
provides functions used by
+066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions in the edge 
cases.
+067 * It doesn't provide an implementation 
of the actual balancing algorithm.
+068 */
+069public abstract class BaseLoadBalancer 
implements LoadBalancer {
+070  protected static final int 
MIN_SERVER_BALANCE = 2;
+071  private volatile boolean stopped = 
false;
+072
+073  private static final 
List<HRegionInfo> EMPTY_REGION_LIST = new ArrayList<>(0);
 074
-075  private static final 
List<HRegionInfo> EMPTY_REGION_LIST = new ArrayList<>(0);
-076
-077  static final 
Predicate<ServerLoad> IDLE_SERVER_PREDICATOR
-078= load ->
load.getNumberOfRegions() == 0;
+075  static final 
Predicate<ServerLoad> IDLE_SERVER_PREDICATOR
+076= load ->
load.getNumberOfRegions() == 0;
+077
+078  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
 079
-080  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-081
-082  private static class DefaultRackManager 
extends RackManager {
-083@Override
-084public String getRack(ServerName 
server) {
-085  return UNKNOWN_RACK;
-086}
-087  }
-088
-089  /**
-090   * The constructor that uses the basic 
MetricsBalancer
-091   */
-092  protected BaseLoadBalancer() {
-093metricsBalancer = new 
MetricsBalancer();
-094  }
-095
-096  /**
-097   * This Constructor accepts an instance 
of MetricsBalancer,
-098   * which will be used instead of 
creating a new one
-099   */
-100  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-101this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-102  }
-103
-104  /**
-105   * An efficient array based 
implementation similar to ClusterState for keeping
-106   * the status of the cluster in terms 
of region assignment and distribution.
-107   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-108   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-109   * class uses mostly indexes and 
arrays.
-110   *
-111   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-112   * topology in terms of server names, 
hostnames and racks.
-113   */
-114  protected static class Cluster {
-115ServerName[] servers;
-116String[] hosts; // ServerName 
uniquely identifies a region server. multiple RS can run on the same host
-117String[] racks;
-118boolean multiServersPerHost = false; 
// whether or not any host has more than one server
-119
-120ArrayListString tables;
-121HRegionInfo[] regions;
-122DequeBalancerRegionLoad[] 
regionLoads;
-123private RegionLocationFinder 
regionFinder;
+080  private static class DefaultRackManager 
extends RackManager {
+081@Override
+082public String getRack(ServerName 
server) {
+083  return UNKNOWN_RACK;
+084}
+085  }
+086
+087  /**
+088   * The constructor that uses the basic 
MetricsBalancer
+089   */
+090  protected BaseLoadBalancer() {
+091metricsBalancer = new 
MetricsBalancer();
+092  }
+093
+094  /**
+095   * This Constructor accepts an instance 
of MetricsBalancer,
+096   * which will be used instead of 
creating a new one
+097   */
+098  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
+099this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
+100  }
+101
+102  /**
+103   * An efficient array based 
implementation similar to ClusterState for keeping
+104   * the status of the cluster in terms 
of region assignment and distribution.
+105   * LoadBalancers, such as 
StochasticLoadBalancer uses this 

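DefaultRackManager, shown above, answers UNKNOWN_RACK for every server; deployments that care about rack locality supply their own mapping by overriding the same getRack(ServerName) hook. A hypothetical static mapping in that shape (the host-to-rack table is invented, and it assumes RackManager's no-arg constructor and UNKNOWN_RACK constant are accessible, as DefaultRackManager's own code suggests):

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.RackManager;

public class StaticRackManager extends RackManager {
  private final Map<String, String> hostToRack = new HashMap<>();

  public StaticRackManager() {
    // Invented topology purely for illustration.
    hostToRack.put("host-1.example.com", "/rack-a");
    hostToRack.put("host-2.example.com", "/rack-b");
  }

  @Override
  public String getRack(ServerName server) {
    // Fall back to the same sentinel the default implementation returns.
    return hostToRack.getOrDefault(server.getHostname(), UNKNOWN_RACK);
  }
}
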
[04/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.MockCluster.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.MockCluster.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.MockCluster.html
index fb4fa30..d501058 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.MockCluster.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.MockCluster.html
@@ -29,660 +29,639 @@
 021import static 
org.junit.Assert.assertNull;
 022import static 
org.junit.Assert.assertTrue;
 023
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.HashMap;
-028import java.util.HashSet;
-029import java.util.LinkedList;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Map.Entry;
-033import java.util.Queue;
-034import java.util.Random;
-035import java.util.Set;
-036import java.util.SortedSet;
-037import java.util.TreeMap;
-038import java.util.TreeSet;
-039
-040import com.google.protobuf.Service;
-041import org.apache.commons.logging.Log;
-042import 
org.apache.commons.logging.LogFactory;
-043import 
org.apache.hadoop.conf.Configuration;
-044import 
org.apache.hadoop.hbase.ChoreService;
-045import 
org.apache.hadoop.hbase.CoordinatedStateManager;
-046import 
org.apache.hadoop.hbase.HBaseConfiguration;
-047import 
org.apache.hadoop.hbase.HColumnDescriptor;
-048import 
org.apache.hadoop.hbase.HRegionInfo;
-049import 
org.apache.hadoop.hbase.HTableDescriptor;
-050import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-051import 
org.apache.hadoop.hbase.ProcedureInfo;
-052import 
org.apache.hadoop.hbase.ServerName;
-053import 
org.apache.hadoop.hbase.TableDescriptors;
-054import 
org.apache.hadoop.hbase.TableName;
-055import 
org.apache.hadoop.hbase.TableNotDisabledException;
-056import 
org.apache.hadoop.hbase.TableNotFoundException;
-057import 
org.apache.hadoop.hbase.client.ClusterConnection;
-058import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-059import 
org.apache.hadoop.hbase.master.RackManager;
-060import 
org.apache.hadoop.hbase.master.RegionPlan;
-061import 
org.apache.hadoop.hbase.executor.ExecutorService;
-062import 
org.apache.hadoop.hbase.master.*;
-063import 
org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-064import 
org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
-065import 
org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-066import 
org.apache.hadoop.hbase.quotas.MasterQuotaManager;
-067import 
org.apache.hadoop.hbase.security.User;
-068import 
org.apache.hadoop.hbase.util.Bytes;
-069import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-070import 
org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-071import 
org.apache.hadoop.net.DNSToSwitchMapping;
-072import org.junit.Assert;
-073import org.junit.BeforeClass;
-074
-075/**
-076 * Class used to be the base of unit 
tests on load balancers. It gives helper
-077 * methods to create maps of {@link 
ServerName} to lists of {@link HRegionInfo}
-078 * and to check list of region plans.
-079 *
-080 */
-081public class BalancerTestBase {
-082  private static final Log LOG = 
LogFactory.getLog(BalancerTestBase.class);
-083  protected static Random rand = new 
Random();
-084  static int regionId = 0;
-085  protected static Configuration conf;
-086  protected static StochasticLoadBalancer 
loadBalancer;
-087
-088  @BeforeClass
-089  public static void beforeAllTests() 
throws Exception {
-090conf = HBaseConfiguration.create();
-091
conf.setClass("hbase.util.ip.to.rack.determiner", MockMapping.class, 
DNSToSwitchMapping.class);
-092
conf.setFloat("hbase.master.balancer.stochastic.maxMovePercent", 0.75f);
-093conf.setFloat("hbase.regions.slop", 
0.0f);
-094
conf.setFloat("hbase.master.balancer.stochastic.localityCost", 0);
-095loadBalancer = new 
StochasticLoadBalancer();
-096loadBalancer.setConf(conf);
-097  }
-098
-099  protected int[] largeCluster = new 
int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-100  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-101  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-102  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-103  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-104  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-105  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-106  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 

[02/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.html
index fb4fa30..d501058 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.html
@@ -29,660 +29,639 @@
 021import static 
org.junit.Assert.assertNull;
 022import static 
org.junit.Assert.assertTrue;
 023
-024import java.io.IOException;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.HashMap;
-028import java.util.HashSet;
-029import java.util.LinkedList;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Map.Entry;
-033import java.util.Queue;
-034import java.util.Random;
-035import java.util.Set;
-036import java.util.SortedSet;
-037import java.util.TreeMap;
-038import java.util.TreeSet;
-039
-040import com.google.protobuf.Service;
-041import org.apache.commons.logging.Log;
-042import 
org.apache.commons.logging.LogFactory;
-043import 
org.apache.hadoop.conf.Configuration;
-044import 
org.apache.hadoop.hbase.ChoreService;
-045import 
org.apache.hadoop.hbase.CoordinatedStateManager;
-046import 
org.apache.hadoop.hbase.HBaseConfiguration;
-047import 
org.apache.hadoop.hbase.HColumnDescriptor;
-048import 
org.apache.hadoop.hbase.HRegionInfo;
-049import 
org.apache.hadoop.hbase.HTableDescriptor;
-050import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-051import 
org.apache.hadoop.hbase.ProcedureInfo;
-052import 
org.apache.hadoop.hbase.ServerName;
-053import 
org.apache.hadoop.hbase.TableDescriptors;
-054import 
org.apache.hadoop.hbase.TableName;
-055import 
org.apache.hadoop.hbase.TableNotDisabledException;
-056import 
org.apache.hadoop.hbase.TableNotFoundException;
-057import 
org.apache.hadoop.hbase.client.ClusterConnection;
-058import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-059import 
org.apache.hadoop.hbase.master.RackManager;
-060import 
org.apache.hadoop.hbase.master.RegionPlan;
-061import 
org.apache.hadoop.hbase.executor.ExecutorService;
-062import 
org.apache.hadoop.hbase.master.*;
-063import 
org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-064import 
org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
-065import 
org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-066import 
org.apache.hadoop.hbase.quotas.MasterQuotaManager;
-067import 
org.apache.hadoop.hbase.security.User;
-068import 
org.apache.hadoop.hbase.util.Bytes;
-069import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-070import 
org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-071import 
org.apache.hadoop.net.DNSToSwitchMapping;
-072import org.junit.Assert;
-073import org.junit.BeforeClass;
-074
-075/**
-076 * Class used to be the base of unit 
tests on load balancers. It gives helper
-077 * methods to create maps of {@link 
ServerName} to lists of {@link HRegionInfo}
-078 * and to check list of region plans.
-079 *
-080 */
-081public class BalancerTestBase {
-082  private static final Log LOG = 
LogFactory.getLog(BalancerTestBase.class);
-083  protected static Random rand = new 
Random();
-084  static int regionId = 0;
-085  protected static Configuration conf;
-086  protected static StochasticLoadBalancer 
loadBalancer;
-087
-088  @BeforeClass
-089  public static void beforeAllTests() 
throws Exception {
-090conf = HBaseConfiguration.create();
-091
conf.setClass("hbase.util.ip.to.rack.determiner", MockMapping.class, 
DNSToSwitchMapping.class);
-092
conf.setFloat("hbase.master.balancer.stochastic.maxMovePercent", 0.75f);
-093conf.setFloat("hbase.regions.slop", 
0.0f);
-094
conf.setFloat("hbase.master.balancer.stochastic.localityCost", 0);
-095loadBalancer = new 
StochasticLoadBalancer();
-096loadBalancer.setConf(conf);
-097  }
-098
-099  protected int[] largeCluster = new 
int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-100  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-101  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-102  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-103  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-104  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-105  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-106  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-107  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 
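
Reader note on the BalancerTestBase hunk above: its @BeforeClass setup reduces to the condensed sketch below. Class and configuration names are taken from the hunk itself; the MockMapping rack-determiner line is omitted because that class is local to the test, and running this assumes the hbase-server test classpath is available.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.master.balancer.StochasticLoadBalancer;

public class BalancerTestSetupSketch {
  public static void main(String[] args) {
    // Same knobs as the @BeforeClass block in the hunk: cap moves at 75%,
    // zero out slop and locality cost so balancing is driven purely by region counts.
    Configuration conf = HBaseConfiguration.create();
    conf.setFloat("hbase.master.balancer.stochastic.maxMovePercent", 0.75f);
    conf.setFloat("hbase.regions.slop", 0.0f);
    conf.setFloat("hbase.master.balancer.stochastic.localityCost", 0f);
    StochasticLoadBalancer loadBalancer = new StochasticLoadBalancer();
    loadBalancer.setConf(conf);
    System.out.println("configured " + loadBalancer.getClass().getSimpleName());
  }
}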

[26/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManager.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManager.html
index d0cb954..4cd76fe 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManager.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManager.html
@@ -375,125 +375,142 @@
 367  }
 368
 369  /**
-370   * Read the last backup start code 
(timestamp) of last successful backup. Will return null if
-371   * there is no startcode stored in 
backup system table or the value is of length 0. These two
-372   * cases indicate there is no 
successful backup completed so far.
-373   * @return the timestamp of a last 
successful backup
-374   * @throws IOException exception
-375   */
-376  public String readBackupStartCode() 
throws IOException {
-377return 
systemTable.readBackupStartCode(backupInfo.getBackupRootDir());
-378  }
-379
-380  /**
-381   * Write the start code (timestamp) to 
backup system table. If passed in null, then write 0 byte.
-382   * @param startCode start code
-383   * @throws IOException exception
-384   */
-385  public void writeBackupStartCode(Long 
startCode) throws IOException {
-386
systemTable.writeBackupStartCode(startCode, backupInfo.getBackupRootDir());
-387  }
-388
-389  /**
-390   * Get the RS log information after the 
last log roll from backup system table.
-391   * @return RS log info
-392   * @throws IOException exception
-393   */
-394  public HashMap<String, Long> readRegionServerLastLogRollResult() throws IOException {
-395return 
systemTable.readRegionServerLastLogRollResult(backupInfo.getBackupRootDir());
-396  }
-397
-398  public PairMapTableName, 
MapString, MapString, ListPairString, 
Boolean, Listbyte[]
-399  readBulkloadRows(ListTableName 
tableList) throws IOException {
-400return 
systemTable.readBulkloadRows(tableList);
-401  }
-402
-403  public void 
removeBulkLoadedRows(ListTableName lst, Listbyte[] rows) throws 
IOException {
-404systemTable.removeBulkLoadedRows(lst, 
rows);
-405  }
-406
-407  public void 
writeBulkLoadedFiles(ListTableName sTableList, Mapbyte[], 
ListPath[] maps)
-408  throws IOException {
-409
systemTable.writeBulkLoadedFiles(sTableList, maps, backupInfo.getBackupId());
-410  }
-411
-412  /**
-413   * Get all completed backup information 
(in desc order by time)
-414   * @return history info of 
BackupCompleteData
-415   * @throws IOException exception
-416   */
-417  public List<BackupInfo> getBackupHistory() throws IOException {
-418return 
systemTable.getBackupHistory();
-419  }
-420
-421  public ArrayList<BackupInfo> getBackupHistory(boolean completed) throws IOException {
-422return 
systemTable.getBackupHistory(completed);
-423  }
-424
-425  /**
-426   * Write the current timestamps for 
each regionserver to backup system table after a successful
-427   * full or incremental backup. Each 
table may have a different set of log timestamps. The saved
-428   * timestamp is of the last log file 
that was backed up already.
-429   * @param tables tables
-430   * @throws IOException exception
-431   */
-432  public void writeRegionServerLogTimestamp(Set<TableName> tables,
-433      HashMap<String, Long> newTimestamps) throws IOException {
-434
systemTable.writeRegionServerLogTimestamp(tables, newTimestamps,
-435  backupInfo.getBackupRootDir());
+370   * Starts new backup session
+371   * @throws IOException if active 
session already exists
+372   */
+373  public void startBackupSession() throws 
IOException {
+374systemTable.startBackupSession();
+375  }
+376
+377  /**
+378   * Finishes active backup session
+379   * @throws IOException if no active 
session
+380   */
+381  public void finishBackupSession() 
throws IOException {
+382systemTable.finishBackupSession();
+383  }
+384
+385
+386  /**
+387   * Read the last backup start code 
(timestamp) of last successful backup. Will return null if
+388   * there is no startcode stored in 
backup system table or the value is of length 0. These two
+389   * cases indicate there is no 
successful backup completed so far.
+390   * @return the timestamp of a last 
successful backup
+391   * @throws IOException exception
+392   */
+393  public String readBackupStartCode() 
throws IOException {
+394return 
systemTable.readBackupStartCode(backupInfo.getBackupRootDir());
+395  }
+396
+397  /**
+398   * Write the start code (timestamp) to 
backup system table. If passed in null, then write 0 byte.
+399   * @param startCode start code
+400   * @throws IOException exception
+401   */
+402  public void writeBackupStartCode(Long 
startCode) throws IOException {
+403
systemTable.writeBackupStartCode(startCode, backupInfo.getBackupRootDir());
+404 
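
The startBackupSession()/finishBackupSession() pair added to BackupManager above enforces a single active backup session. A minimal sketch of the intended call pattern, assuming only the two method signatures shown in the hunk; the BackupSessionOwner interface and runGuarded helper are illustrative stand-ins, not HBase API.

import java.io.IOException;

public class BackupSessionGuardSketch {
  // Stand-in for the two methods added to BackupManager above; not the HBase class.
  interface BackupSessionOwner {
    void startBackupSession() throws IOException;   // fails if a session is already active
    void finishBackupSession() throws IOException;  // fails if no session is active
  }

  // Claim the single active session up front and always release it,
  // even when the backup body throws.
  static void runGuarded(BackupSessionOwner manager, Runnable backupBody) throws IOException {
    manager.startBackupSession();
    try {
      backupBody.run();
    } finally {
      manager.finishBackupSession();
    }
  }
}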

[27/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html
index cc40d21..2df0b04 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html
@@ -61,8 +61,8 @@
 053import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 054import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
 055import 
org.apache.hadoop.hbase.backup.BackupType;
-056import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-057import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+056import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+057import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 058import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 059import 
org.apache.hadoop.hbase.client.Connection;
 060import 
org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -122,670 +122,703 @@
 114
 115  public static abstract class Command 
extends Configured {
 116CommandLine cmdline;
-117
+117Connection conn;
 118Command(Configuration conf) {
-119  super(conf);
-120}
-121
-122public void execute() throws 
IOException {
-123  if (cmdline.hasOption("h") || 
cmdline.hasOption("help")) {
-124printUsage();
-125throw new 
IOException(INCORRECT_USAGE);
-126  }
-127}
-128
-129protected abstract void 
printUsage();
-130  }
-131
-132  private BackupCommands() {
-133throw new 
AssertionError("Instantiating utility class...");
-134  }
-135
-136  public static Command 
createCommand(Configuration conf, BackupCommand type, CommandLine cmdline) {
-137Command cmd = null;
-138switch (type) {
-139case CREATE:
-140  cmd = new CreateCommand(conf, 
cmdline);
-141  break;
-142case DESCRIBE:
-143  cmd = new DescribeCommand(conf, 
cmdline);
-144  break;
-145case PROGRESS:
-146  cmd = new ProgressCommand(conf, 
cmdline);
-147  break;
-148case DELETE:
-149  cmd = new DeleteCommand(conf, 
cmdline);
-150  break;
-151case CANCEL:
-152  cmd = new CancelCommand(conf, 
cmdline);
-153  break;
-154case HISTORY:
-155  cmd = new HistoryCommand(conf, 
cmdline);
-156  break;
-157case SET:
-158  cmd = new BackupSetCommand(conf, 
cmdline);
-159  break;
-160case HELP:
-161default:
-162  cmd = new HelpCommand(conf, 
cmdline);
-163  break;
-164}
-165return cmd;
-166  }
-167
-168  static int numOfArgs(String[] args) {
-169if (args == null) return 0;
-170return args.length;
-171  }
-172
-173  public static class CreateCommand 
extends Command {
-174
-175CreateCommand(Configuration conf, 
CommandLine cmdline) {
-176  super(conf);
-177  this.cmdline = cmdline;
-178}
-179
-180@Override
-181public void execute() throws 
IOException {
-182  super.execute();
-183  if (cmdline == null || 
cmdline.getArgs() == null) {
-184printUsage();
-185throw new 
IOException(INCORRECT_USAGE);
-186  }
-187  String[] args = 
cmdline.getArgs();
-188  if (args.length !=3) {
-189printUsage();
-190throw new 
IOException(INCORRECT_USAGE);
-191  }
-192
-193  if (!BackupType.FULL.toString().equalsIgnoreCase(args[1])
-194      && !BackupType.INCREMENTAL.toString().equalsIgnoreCase(args[1])) {
-195System.out.println("ERROR: 
invalid backup type: " + args[1]);
-196printUsage();
-197throw new 
IOException(INCORRECT_USAGE);
-198  }
-199  if (!verifyPath(args[2])) {
-200System.out.println("ERROR: 
invalid backup destination: " + args[2]);
-201printUsage();
-202throw new 
IOException(INCORRECT_USAGE);
-203  }
-204
-205  String tables = null;
-206  Configuration conf = getConf() != 
null ? getConf() : HBaseConfiguration.create();
-207
-208  // Check if we have both: backup 
set and list of tables
-209  if (cmdline.hasOption(OPTION_TABLE) && cmdline.hasOption(OPTION_SET)) {
-210System.out.println("ERROR: You 
can specify either backup set or list"
-211+ " of tables, but not 
both");
-212printUsage();
-213throw new 
IOException(INCORRECT_USAGE);
-214  }
-215
-216  // Check backup set
-217  String setName = null;
-218  if (cmdline.hasOption(OPTION_SET)) 
{
-219setName = 
cmdline.getOptionValue(OPTION_SET);
-220tables = getTablesForSet(setName, 
conf);
-221
-222if (tables == null) {
-223  System.out.println("ERROR: 
Backup set '" + setName
-224  + "' is either empty or 
does not exist");
-225  printUsage();
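
For orientation, BackupCommands.createCommand in the hunk above is a plain switch from a BackupCommand constant to a Command instance, with HELP as the fallthrough. A self-contained sketch of that shape follows; the names here are hypothetical stand-ins, not the HBase classes.

import java.util.EnumMap;
import java.util.Map;
import java.util.function.Supplier;

public class CommandDispatchSketch {
  enum Cmd { CREATE, DELETE, HELP }

  interface Command { void execute(); }

  private static final Map<Cmd, Supplier<Command>> FACTORY = new EnumMap<>(Cmd.class);
  static {
    FACTORY.put(Cmd.CREATE, () -> () -> System.out.println("create backup"));
    FACTORY.put(Cmd.DELETE, () -> () -> System.out.println("delete backup"));
    FACTORY.put(Cmd.HELP,   () -> () -> System.out.println("print usage"));
  }

  static Command createCommand(Cmd type) {
    // Unknown or unmapped types fall back to HELP, mirroring the switch above.
    return FACTORY.getOrDefault(type, FACTORY.get(Cmd.HELP)).get();
  }

  public static void main(String[] args) {
    createCommand(Cmd.CREATE).execute();
  }
}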

[14/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
index 7e37ca0..79c65e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
@@ -70,1527 +70,1525 @@
 062import com.google.common.collect.Sets;
 063
 064/**
-065 * The base class for load balancers. It 
provides the the functions used to by
-066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions
-067 * in the edge cases. It doesn't provide 
an implementation of the
-068 * actual balancing algorithm.
-069 *
-070 */
-071public abstract class BaseLoadBalancer 
implements LoadBalancer {
-072  protected static final int 
MIN_SERVER_BALANCE = 2;
-073  private volatile boolean stopped = 
false;
+065 * The base class for load balancers. It 
provides functions used by
+066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions in the edge 
cases.
+067 * It doesn't provide an implementation 
of the actual balancing algorithm.
+068 */
+069public abstract class BaseLoadBalancer 
implements LoadBalancer {
+070  protected static final int 
MIN_SERVER_BALANCE = 2;
+071  private volatile boolean stopped = 
false;
+072
+073  private static final List<HRegionInfo> EMPTY_REGION_LIST = new ArrayList<>(0);
 074
-075  private static final List<HRegionInfo> EMPTY_REGION_LIST = new ArrayList<>(0);
-076
-077  static final Predicate<ServerLoad> IDLE_SERVER_PREDICATOR
-078    = load -> load.getNumberOfRegions() == 0;
+075  static final Predicate<ServerLoad> IDLE_SERVER_PREDICATOR
+076    = load -> load.getNumberOfRegions() == 0;
+077
+078  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
 079
-080  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-081
-082  private static class DefaultRackManager 
extends RackManager {
-083@Override
-084public String getRack(ServerName 
server) {
-085  return UNKNOWN_RACK;
-086}
-087  }
-088
-089  /**
-090   * The constructor that uses the basic 
MetricsBalancer
-091   */
-092  protected BaseLoadBalancer() {
-093metricsBalancer = new 
MetricsBalancer();
-094  }
-095
-096  /**
-097   * This Constructor accepts an instance 
of MetricsBalancer,
-098   * which will be used instead of 
creating a new one
-099   */
-100  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-101this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-102  }
-103
-104  /**
-105   * An efficient array based 
implementation similar to ClusterState for keeping
-106   * the status of the cluster in terms 
of region assignment and distribution.
-107   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-108   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-109   * class uses mostly indexes and 
arrays.
-110   *
-111   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-112   * topology in terms of server names, 
hostnames and racks.
-113   */
-114  protected static class Cluster {
-115ServerName[] servers;
-116String[] hosts; // ServerName 
uniquely identifies a region server. multiple RS can run on the same host
-117String[] racks;
-118boolean multiServersPerHost = false; 
// whether or not any host has more than one server
-119
-120ArrayListString tables;
-121HRegionInfo[] regions;
-122DequeBalancerRegionLoad[] 
regionLoads;
-123private RegionLocationFinder 
regionFinder;
+080  private static class DefaultRackManager 
extends RackManager {
+081@Override
+082public String getRack(ServerName 
server) {
+083  return UNKNOWN_RACK;
+084}
+085  }
+086
+087  /**
+088   * The constructor that uses the basic 
MetricsBalancer
+089   */
+090  protected BaseLoadBalancer() {
+091metricsBalancer = new 
MetricsBalancer();
+092  }
+093
+094  /**
+095   * This Constructor accepts an instance 
of MetricsBalancer,
+096   * which will be used instead of 
creating a new one
+097   */
+098  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
+099this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
+100  }
+101
+102  /**
+103   * An efficient array based 
implementation similar to ClusterState for keeping
+104   * the status of the cluster in terms 
of region assignment and distribution.
+105   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
+106   * hundreds of thousands of hashmap 
manipulations are very 

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/testdevapidocs/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.html
 
b/testdevapidocs/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.html
index 0ee7251..7d7ddb3 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestBaseLoadBalancer
+public class TestBaseLoadBalancer
 extends BalancerTestBase
 
 
@@ -333,7 +333,7 @@ extends 
 
 loadBalancer
-private staticorg.apache.hadoop.hbase.master.LoadBalancer loadBalancer
+private staticorg.apache.hadoop.hbase.master.LoadBalancer loadBalancer
 
 
 
@@ -342,7 +342,7 @@ extends 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -351,7 +351,7 @@ extends 
 
 master
-private static finalorg.apache.hadoop.hbase.ServerName master
+private static finalorg.apache.hadoop.hbase.ServerName master
 
 
 
@@ -360,7 +360,7 @@ extends 
 
 rackManager
-private staticorg.apache.hadoop.hbase.master.RackManager rackManager
+private staticorg.apache.hadoop.hbase.master.RackManager rackManager
 
 
 
@@ -369,7 +369,7 @@ extends 
 
 NUM_SERVERS
-private static finalint NUM_SERVERS
+private static finalint NUM_SERVERS
 
 See Also:
 Constant
 Field Values
@@ -382,7 +382,7 @@ extends 
 
 servers
-private staticorg.apache.hadoop.hbase.ServerName[] servers
+private staticorg.apache.hadoop.hbase.ServerName[] servers
 
 
 
@@ -391,7 +391,7 @@ extends 
 
 regionsAndServersMocks
-int[][] regionsAndServersMocks
+int[][] regionsAndServersMocks
 
 
 
@@ -400,7 +400,7 @@ extends 
 
 name
-publicorg.junit.rules.TestName name
+publicorg.junit.rules.TestName name
 
 
 
@@ -417,7 +417,7 @@ extends 
 
 TestBaseLoadBalancer
-publicTestBaseLoadBalancer()
+publicTestBaseLoadBalancer()
 
 
 
@@ -434,7 +434,7 @@ extends 
 
 beforeAllTests
-public staticvoidbeforeAllTests()
+public staticvoidbeforeAllTests()
throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -448,7 +448,7 @@ extends 
 
 assertImmediateAssignment
-privatevoidassertImmediateAssignment(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.HRegionInforegions,
+privatevoidassertImmediateAssignment(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.HRegionInforegions,
http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.ServerNameservers,
http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in 
java.util">Maporg.apache.hadoop.hbase.HRegionInfo,org.apache.hadoop.hbase.ServerNameassignments)
 All regions have an assignment.
@@ -466,7 +466,7 @@ extends 
 
 testBulkAssignment
-publicvoidtestBulkAssignment()
+publicvoidtestBulkAssignment()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 Tests the bulk assignment used during cluster startup.
 
@@ -484,7 +484,7 @@ extends 
 
 testRetainAssignment
-publicvoidtestRetainAssignment()
+publicvoidtestRetainAssignment()
   throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 Test the cluster startup bulk assignment which attempts to 
retain
  assignment info.
@@ -500,7 +500,7 @@ extends 
 
 testRandomAssignment
-publicvoidtestRandomAssignment()
+publicvoidtestRandomAssignment()
   throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -514,7 +514,7 @@ extends 
 
 testRandomAssignment
-privatevoidtestRandomAssignment(intnumberOfIdleServers)
+privatevoidtestRandomAssignment(intnumberOfIdleServers)
throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -528,7 +528,7 @@ extends 
 
 testRegionAvailability
-publicvoidtestRegionAvailability()
+publicvoidtestRegionAvailability()
 throws 
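
testRetainAssignment above exercises the startup path that tries to keep regions on their previous servers. The following is an illustrative sketch of that retain-assignment idea only, not the BaseLoadBalancer implementation: keep a region on its old server when it is still live, otherwise pick a random live server.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;

public class RetainAssignmentSketch {
  static <R, S> Map<S, List<R>> retain(Map<R, S> previous, List<S> liveServers, Random rand) {
    Map<S, List<R>> plan = new HashMap<>();
    for (Map.Entry<R, S> e : previous.entrySet()) {
      // Reuse the old server if still live; otherwise fall back to a random live one.
      S target = liveServers.contains(e.getValue())
          ? e.getValue()
          : liveServers.get(rand.nextInt(liveServers.size()));
      plan.computeIfAbsent(target, s -> new ArrayList<>()).add(e.getKey());
    }
    return plan;
  }

  public static void main(String[] args) {
    Map<String, String> previous = Map.of("region-a", "rs1", "region-b", "rs9");
    System.out.println(retain(previous, List.of("rs1", "rs2"), new Random(42)));
  }
}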

[20/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
index 7e37ca0..79c65e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
@@ -70,1527 +70,1525 @@
 062import com.google.common.collect.Sets;
 063
 064/**
-065 * The base class for load balancers. It 
provides the the functions used to by
-066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions
-067 * in the edge cases. It doesn't provide 
an implementation of the
-068 * actual balancing algorithm.
-069 *
-070 */
-071public abstract class BaseLoadBalancer 
implements LoadBalancer {
-072  protected static final int 
MIN_SERVER_BALANCE = 2;
-073  private volatile boolean stopped = 
false;
+065 * The base class for load balancers. It 
provides functions used by
+066 * {@link 
org.apache.hadoop.hbase.master.AssignmentManager} to assign regions in the edge 
cases.
+067 * It doesn't provide an implementation 
of the actual balancing algorithm.
+068 */
+069public abstract class BaseLoadBalancer 
implements LoadBalancer {
+070  protected static final int 
MIN_SERVER_BALANCE = 2;
+071  private volatile boolean stopped = 
false;
+072
+073  private static final List<HRegionInfo> EMPTY_REGION_LIST = new ArrayList<>(0);
 074
-075  private static final List<HRegionInfo> EMPTY_REGION_LIST = new ArrayList<>(0);
-076
-077  static final Predicate<ServerLoad> IDLE_SERVER_PREDICATOR
-078    = load -> load.getNumberOfRegions() == 0;
+075  static final Predicate<ServerLoad> IDLE_SERVER_PREDICATOR
+076    = load -> load.getNumberOfRegions() == 0;
+077
+078  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
 079
-080  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-081
-082  private static class DefaultRackManager 
extends RackManager {
-083@Override
-084public String getRack(ServerName 
server) {
-085  return UNKNOWN_RACK;
-086}
-087  }
-088
-089  /**
-090   * The constructor that uses the basic 
MetricsBalancer
-091   */
-092  protected BaseLoadBalancer() {
-093metricsBalancer = new 
MetricsBalancer();
-094  }
-095
-096  /**
-097   * This Constructor accepts an instance 
of MetricsBalancer,
-098   * which will be used instead of 
creating a new one
-099   */
-100  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-101this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-102  }
-103
-104  /**
-105   * An efficient array based 
implementation similar to ClusterState for keeping
-106   * the status of the cluster in terms 
of region assignment and distribution.
-107   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-108   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-109   * class uses mostly indexes and 
arrays.
-110   *
-111   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-112   * topology in terms of server names, 
hostnames and racks.
-113   */
-114  protected static class Cluster {
-115ServerName[] servers;
-116String[] hosts; // ServerName 
uniquely identifies a region server. multiple RS can run on the same host
-117String[] racks;
-118boolean multiServersPerHost = false; 
// whether or not any host has more than one server
-119
-120ArrayListString tables;
-121HRegionInfo[] regions;
-122DequeBalancerRegionLoad[] 
regionLoads;
-123private RegionLocationFinder 
regionFinder;
+080  private static class DefaultRackManager 
extends RackManager {
+081@Override
+082public String getRack(ServerName 
server) {
+083  return UNKNOWN_RACK;
+084}
+085  }
+086
+087  /**
+088   * The constructor that uses the basic 
MetricsBalancer
+089   */
+090  protected BaseLoadBalancer() {
+091metricsBalancer = new 
MetricsBalancer();
+092  }
+093
+094  /**
+095   * This Constructor accepts an instance 
of MetricsBalancer,
+096   * which will be used instead of 
creating a new one
+097   */
+098  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
+099this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
+100  }
+101
+102  /**
+103   * An efficient array based 
implementation similar to ClusterState for keeping
+104   * the status of the cluster in terms 
of region assignment and distribution.
+105   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because 

[25/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
index 2ccefa4..87d7143 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
@@ -141,1653 +141,1703 @@
 133
 134  private final static String 
BACKUP_INFO_PREFIX = "session:";
 135  private final static String 
START_CODE_ROW = "startcode:";
-136  private final static String 
INCR_BACKUP_SET = "incrbackupset:";
-137  private final static String 
TABLE_RS_LOG_MAP_PREFIX = "trslm:";
-138  private final static String 
RS_LOG_TS_PREFIX = "rslogts:";
-139
-140  private final static String 
BULK_LOAD_PREFIX = "bulk:";
-141  private final static byte[] 
BULK_LOAD_PREFIX_BYTES = BULK_LOAD_PREFIX.getBytes();
-142  final static byte[] TBL_COL = 
Bytes.toBytes("tbl");
-143  final static byte[] FAM_COL = 
Bytes.toBytes("fam");
-144  final static byte[] PATH_COL = 
Bytes.toBytes("path");
-145  final static byte[] STATE_COL = 
Bytes.toBytes("state");
-146  // the two states a bulk loaded file 
can be
-147  final static byte[] BL_PREPARE = 
Bytes.toBytes("R");
-148  final static byte[] BL_COMMIT = 
Bytes.toBytes("D");
-149
-150  private final static String WALS_PREFIX 
= "wals:";
-151  private final static String 
SET_KEY_PREFIX = "backupset:";
-152
-153  // separator between BULK_LOAD_PREFIX 
and ordinals
-154 protected final static String 
BLK_LD_DELIM = ":";
-155  private final static byte[] EMPTY_VALUE 
= new byte[] {};
-156
-157  // Safe delimiter in a string
-158  private final static String NULL = 
"\u";
-159
-160  public BackupSystemTable(Connection 
conn) throws IOException {
-161this.connection = conn;
-162tableName = 
BackupSystemTable.getTableName(conn.getConfiguration());
-163checkSystemTable();
-164  }
+136  private final static byte[] 
ACTIVE_SESSION_ROW = "activesession:".getBytes();
+137  private final static byte[] 
ACTIVE_SESSION_COL = "c".getBytes();
+138
+139  private final static byte[] 
ACTIVE_SESSION_YES = "yes".getBytes();
+140  private final static byte[] 
ACTIVE_SESSION_NO = "no".getBytes();
+141
+142  private final static String 
INCR_BACKUP_SET = "incrbackupset:";
+143  private final static String 
TABLE_RS_LOG_MAP_PREFIX = "trslm:";
+144  private final static String 
RS_LOG_TS_PREFIX = "rslogts:";
+145
+146  private final static String 
BULK_LOAD_PREFIX = "bulk:";
+147  private final static byte[] 
BULK_LOAD_PREFIX_BYTES = BULK_LOAD_PREFIX.getBytes();
+148  final static byte[] TBL_COL = 
Bytes.toBytes("tbl");
+149  final static byte[] FAM_COL = 
Bytes.toBytes("fam");
+150  final static byte[] PATH_COL = 
Bytes.toBytes("path");
+151  final static byte[] STATE_COL = 
Bytes.toBytes("state");
+152  // the two states a bulk loaded file 
can be
+153  final static byte[] BL_PREPARE = 
Bytes.toBytes("R");
+154  final static byte[] BL_COMMIT = 
Bytes.toBytes("D");
+155
+156  private final static String WALS_PREFIX 
= "wals:";
+157  private final static String 
SET_KEY_PREFIX = "backupset:";
+158
+159  // separator between BULK_LOAD_PREFIX 
and ordinals
+160 protected final static String 
BLK_LD_DELIM = ":";
+161  private final static byte[] EMPTY_VALUE 
= new byte[] {};
+162
+163  // Safe delimiter in a string
+164  private final static String NULL = 
"\u";
 165
-166  private void checkSystemTable() throws 
IOException {
-167try (Admin admin = 
connection.getAdmin();) {
-168
-169  verifyNamespaceExists(admin);
-170
-171  if (!admin.tableExists(tableName)) 
{
-172HTableDescriptor backupHTD =
-173
BackupSystemTable.getSystemTableDescriptor(connection.getConfiguration());
-174admin.createTable(backupHTD);
-175  }
-176  waitForSystemTable(admin);
-177}
-178  }
-179
-180  private void 
verifyNamespaceExists(Admin admin) throws IOException {
-181  String namespaceName  = 
tableName.getNamespaceAsString();
-182  NamespaceDescriptor ns = 
NamespaceDescriptor.create(namespaceName).build();
-183  NamespaceDescriptor[] list = 
admin.listNamespaceDescriptors();
-184  boolean exists = false;
-185  for( NamespaceDescriptor nsd: list) 
{
-186if 
(nsd.getName().equals(ns.getName())) {
-187  exists = true;
-188  break;
-189}
-190  }
-191  if (!exists) {
-192admin.createNamespace(ns);
-193  }
-194  }
-195
-196  private void waitForSystemTable(Admin 
admin) throws IOException {
-197long TIMEOUT = 6;
-198long startTime = 
EnvironmentEdgeManager.currentTime();
-199while (!admin.tableExists(tableName) 
|| 
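
The BackupSystemTable hunk above introduces a single activesession: row whose column c flips between yes and no. The exclusive-session rule it encodes matches this in-memory sketch; illustrative only, since the real implementation persists the flag in the backup system table rather than in process memory.

import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;

public class ActiveSessionFlagSketch {
  private final AtomicBoolean active = new AtomicBoolean(false);

  public void startBackupSession() throws IOException {
    // Only one session may be active at a time.
    if (!active.compareAndSet(false, true)) {
      throw new IOException("Active backup session already exists");
    }
  }

  public void finishBackupSession() throws IOException {
    if (!active.compareAndSet(true, false)) {
      throw new IOException("No active backup session");
    }
  }
}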

[28/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
index cc40d21..2df0b04 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
@@ -61,8 +61,8 @@
 053import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 054import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
 055import 
org.apache.hadoop.hbase.backup.BackupType;
-056import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-057import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+056import 
org.apache.hadoop.hbase.backup.util.BackupSet;
+057import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 058import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 059import 
org.apache.hadoop.hbase.client.Connection;
 060import 
org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -122,670 +122,703 @@
 114
 115  public static abstract class Command 
extends Configured {
 116CommandLine cmdline;
-117
+117Connection conn;
 118Command(Configuration conf) {
-119  super(conf);
-120}
-121
-122public void execute() throws 
IOException {
-123  if (cmdline.hasOption("h") || 
cmdline.hasOption("help")) {
-124printUsage();
-125throw new 
IOException(INCORRECT_USAGE);
-126  }
-127}
-128
-129protected abstract void 
printUsage();
-130  }
-131
-132  private BackupCommands() {
-133throw new 
AssertionError("Instantiating utility class...");
-134  }
-135
-136  public static Command 
createCommand(Configuration conf, BackupCommand type, CommandLine cmdline) {
-137Command cmd = null;
-138switch (type) {
-139case CREATE:
-140  cmd = new CreateCommand(conf, 
cmdline);
-141  break;
-142case DESCRIBE:
-143  cmd = new DescribeCommand(conf, 
cmdline);
-144  break;
-145case PROGRESS:
-146  cmd = new ProgressCommand(conf, 
cmdline);
-147  break;
-148case DELETE:
-149  cmd = new DeleteCommand(conf, 
cmdline);
-150  break;
-151case CANCEL:
-152  cmd = new CancelCommand(conf, 
cmdline);
-153  break;
-154case HISTORY:
-155  cmd = new HistoryCommand(conf, 
cmdline);
-156  break;
-157case SET:
-158  cmd = new BackupSetCommand(conf, 
cmdline);
-159  break;
-160case HELP:
-161default:
-162  cmd = new HelpCommand(conf, 
cmdline);
-163  break;
-164}
-165return cmd;
-166  }
-167
-168  static int numOfArgs(String[] args) {
-169if (args == null) return 0;
-170return args.length;
-171  }
-172
-173  public static class CreateCommand 
extends Command {
-174
-175CreateCommand(Configuration conf, 
CommandLine cmdline) {
-176  super(conf);
-177  this.cmdline = cmdline;
-178}
-179
-180@Override
-181public void execute() throws 
IOException {
-182  super.execute();
-183  if (cmdline == null || 
cmdline.getArgs() == null) {
-184printUsage();
-185throw new 
IOException(INCORRECT_USAGE);
-186  }
-187  String[] args = 
cmdline.getArgs();
-188  if (args.length !=3) {
-189printUsage();
-190throw new 
IOException(INCORRECT_USAGE);
-191  }
-192
-193  if (!BackupType.FULL.toString().equalsIgnoreCase(args[1])
-194      && !BackupType.INCREMENTAL.toString().equalsIgnoreCase(args[1])) {
-195System.out.println("ERROR: 
invalid backup type: " + args[1]);
-196printUsage();
-197throw new 
IOException(INCORRECT_USAGE);
-198  }
-199  if (!verifyPath(args[2])) {
-200System.out.println("ERROR: 
invalid backup destination: " + args[2]);
-201printUsage();
-202throw new 
IOException(INCORRECT_USAGE);
-203  }
-204
-205  String tables = null;
-206  Configuration conf = getConf() != 
null ? getConf() : HBaseConfiguration.create();
-207
-208  // Check if we have both: backup 
set and list of tables
-209  if (cmdline.hasOption(OPTION_TABLE) && cmdline.hasOption(OPTION_SET)) {
-210System.out.println("ERROR: You 
can specify either backup set or list"
-211+ " of tables, but not 
both");
-212printUsage();
-213throw new 
IOException(INCORRECT_USAGE);
-214  }
-215
-216  // Check backup set
-217  String setName = null;
-218  if (cmdline.hasOption(OPTION_SET)) 
{
-219setName = 
cmdline.getOptionValue(OPTION_SET);
-220tables = getTablesForSet(setName, 
conf);
-221
-222if (tables == null) {
-223  System.out.println("ERROR: 
Backup set '" + setName
-224

[40/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
index 04c9884..97afc2c 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
@@ -117,13 +117,12 @@ var activeTableTab = "activeTableTab";
 
 
 
-public abstract class BaseLoadBalancer
+public abstract class BaseLoadBalancer
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements LoadBalancer
-The base class for load balancers. It provides the the 
functions used to by
- AssignmentManager to assign 
regions
- in the edge cases. It doesn't provide an implementation of the
- actual balancing algorithm.
+The base class for load balancers. It provides functions 
used by
+ AssignmentManager to assign 
regions in the edge cases.
+ It doesn't provide an implementation of the actual balancing algorithm.
 
 
 
@@ -499,7 +498,7 @@ implements 
 
 MIN_SERVER_BALANCE
-protected static finalint MIN_SERVER_BALANCE
+protected static finalint MIN_SERVER_BALANCE
 
 See Also:
 Constant
 Field Values
@@ -512,7 +511,7 @@ implements 
 
 stopped
-private volatileboolean stopped
+private volatileboolean stopped
 
 
 
@@ -521,7 +520,7 @@ implements 
 
 EMPTY_REGION_LIST
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionInfo EMPTY_REGION_LIST
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionInfo EMPTY_REGION_LIST
 
 
 
@@ -530,7 +529,7 @@ implements 
 
 IDLE_SERVER_PREDICATOR
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/function/Predicate.html?is-external=true;
 title="class or interface in java.util.function">PredicateServerLoad IDLE_SERVER_PREDICATOR
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/function/Predicate.html?is-external=true;
 title="class or interface in java.util.function">PredicateServerLoad IDLE_SERVER_PREDICATOR
 
 
 
@@ -539,7 +538,7 @@ implements 
 
 regionFinder
-protected finalRegionLocationFinder regionFinder
+protected finalRegionLocationFinder regionFinder
 
 
 
@@ -548,7 +547,7 @@ implements 
 
 slop
-protectedfloat slop
+protectedfloat slop
 
 
 
@@ -557,7 +556,7 @@ implements 
 
 overallSlop
-protectedfloat overallSlop
+protectedfloat overallSlop
 
 
 
@@ -566,7 +565,7 @@ implements 
 
 config
-protectedorg.apache.hadoop.conf.Configuration config
+protectedorg.apache.hadoop.conf.Configuration config
 
 
 
@@ -575,7 +574,7 @@ implements 
 
 rackManager
-protectedRackManager rackManager
+protectedRackManager rackManager
 
 
 
@@ -584,7 +583,7 @@ implements 
 
 RANDOM
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true;
 title="class or interface in java.util">Random RANDOM
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true;
 title="class or interface in java.util">Random RANDOM
 
 
 
@@ -593,7 +592,7 @@ implements 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -602,7 +601,7 @@ implements 
 
 DEFAULT_TABLES_ON_MASTER
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[] DEFAULT_TABLES_ON_MASTER
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[] DEFAULT_TABLES_ON_MASTER
 
 
 
@@ -611,7 +610,7 @@ implements 
 
 TABLES_ON_MASTER
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String TABLES_ON_MASTER
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String TABLES_ON_MASTER
 
 See Also:
 Constant
 Field Values
@@ -624,7 +623,7 @@ implements 
 
 tablesOnMaster
-protected finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String tablesOnMaster
+protected finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 

[24/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
index 2ccefa4..87d7143 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
@@ -141,1653 +141,1703 @@
 133
 134  private final static String 
BACKUP_INFO_PREFIX = "session:";
 135  private final static String 
START_CODE_ROW = "startcode:";
-136  private final static String 
INCR_BACKUP_SET = "incrbackupset:";
-137  private final static String 
TABLE_RS_LOG_MAP_PREFIX = "trslm:";
-138  private final static String 
RS_LOG_TS_PREFIX = "rslogts:";
-139
-140  private final static String 
BULK_LOAD_PREFIX = "bulk:";
-141  private final static byte[] 
BULK_LOAD_PREFIX_BYTES = BULK_LOAD_PREFIX.getBytes();
-142  final static byte[] TBL_COL = 
Bytes.toBytes("tbl");
-143  final static byte[] FAM_COL = 
Bytes.toBytes("fam");
-144  final static byte[] PATH_COL = 
Bytes.toBytes("path");
-145  final static byte[] STATE_COL = 
Bytes.toBytes("state");
-146  // the two states a bulk loaded file 
can be
-147  final static byte[] BL_PREPARE = 
Bytes.toBytes("R");
-148  final static byte[] BL_COMMIT = 
Bytes.toBytes("D");
-149
-150  private final static String WALS_PREFIX 
= "wals:";
-151  private final static String 
SET_KEY_PREFIX = "backupset:";
-152
-153  // separator between BULK_LOAD_PREFIX 
and ordinals
-154 protected final static String 
BLK_LD_DELIM = ":";
-155  private final static byte[] EMPTY_VALUE 
= new byte[] {};
-156
-157  // Safe delimiter in a string
-158  private final static String NULL = 
"\u";
-159
-160  public BackupSystemTable(Connection 
conn) throws IOException {
-161this.connection = conn;
-162tableName = 
BackupSystemTable.getTableName(conn.getConfiguration());
-163checkSystemTable();
-164  }
+136  private final static byte[] 
ACTIVE_SESSION_ROW = "activesession:".getBytes();
+137  private final static byte[] 
ACTIVE_SESSION_COL = "c".getBytes();
+138
+139  private final static byte[] 
ACTIVE_SESSION_YES = "yes".getBytes();
+140  private final static byte[] 
ACTIVE_SESSION_NO = "no".getBytes();
+141
+142  private final static String 
INCR_BACKUP_SET = "incrbackupset:";
+143  private final static String 
TABLE_RS_LOG_MAP_PREFIX = "trslm:";
+144  private final static String 
RS_LOG_TS_PREFIX = "rslogts:";
+145
+146  private final static String 
BULK_LOAD_PREFIX = "bulk:";
+147  private final static byte[] 
BULK_LOAD_PREFIX_BYTES = BULK_LOAD_PREFIX.getBytes();
+148  final static byte[] TBL_COL = 
Bytes.toBytes("tbl");
+149  final static byte[] FAM_COL = 
Bytes.toBytes("fam");
+150  final static byte[] PATH_COL = 
Bytes.toBytes("path");
+151  final static byte[] STATE_COL = 
Bytes.toBytes("state");
+152  // the two states a bulk loaded file 
can be
+153  final static byte[] BL_PREPARE = 
Bytes.toBytes("R");
+154  final static byte[] BL_COMMIT = 
Bytes.toBytes("D");
+155
+156  private final static String WALS_PREFIX 
= "wals:";
+157  private final static String 
SET_KEY_PREFIX = "backupset:";
+158
+159  // separator between BULK_LOAD_PREFIX 
and ordinals
+160 protected final static String 
BLK_LD_DELIM = ":";
+161  private final static byte[] EMPTY_VALUE 
= new byte[] {};
+162
+163  // Safe delimiter in a string
+164  private final static String NULL = 
"\u";
 165
-166  private void checkSystemTable() throws 
IOException {
-167try (Admin admin = 
connection.getAdmin();) {
-168
-169  verifyNamespaceExists(admin);
-170
-171  if (!admin.tableExists(tableName)) 
{
-172HTableDescriptor backupHTD =
-173
BackupSystemTable.getSystemTableDescriptor(connection.getConfiguration());
-174admin.createTable(backupHTD);
-175  }
-176  waitForSystemTable(admin);
-177}
-178  }
-179
-180  private void 
verifyNamespaceExists(Admin admin) throws IOException {
-181  String namespaceName  = 
tableName.getNamespaceAsString();
-182  NamespaceDescriptor ns = 
NamespaceDescriptor.create(namespaceName).build();
-183  NamespaceDescriptor[] list = 
admin.listNamespaceDescriptors();
-184  boolean exists = false;
-185  for( NamespaceDescriptor nsd: list) 
{
-186if 
(nsd.getName().equals(ns.getName())) {
-187  exists = true;
-188  break;
-189}
-190  }
-191  if (!exists) {
-192admin.createNamespace(ns);
-193  }
-194  }
-195
-196  private void waitForSystemTable(Admin 
admin) throws IOException {
-197long TIMEOUT = 6;
-198long startTime = 
EnvironmentEdgeManager.currentTime();
-199while (!admin.tableExists(tableName) 
|| !admin.isTableAvailable(tableName)) {
-200  try {
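
waitForSystemTable in the hunk above loops until the backup system table both exists and is available. A hedged sketch of that wait-with-deadline shape; the TableProbe interface, the 100 ms poll interval, and the timeout value are assumptions, since the hunk's TIMEOUT constant is cut off by the rendering.

import java.io.IOException;

public class WaitForTableSketch {
  interface TableProbe {
    boolean ready() throws IOException; // e.g. tableExists(...) && isTableAvailable(...)
  }

  static void waitUntilReady(TableProbe probe, long timeoutMillis) throws IOException {
    long deadline = System.currentTimeMillis() + timeoutMillis;
    try {
      // Poll until the probe reports ready, failing once the deadline passes.
      while (!probe.ready()) {
        if (System.currentTimeMillis() > deadline) {
          throw new IOException("Backup system table not available before timeout");
        }
        Thread.sleep(100);
      }
    } catch (InterruptedException ie) {
      Thread.currentThread().interrupt();
      throw new IOException("Interrupted while waiting for backup system table", ie);
    }
  }
}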

[05/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/testdevapidocs/src-html/org/apache/hadoop/hbase/backup/TestFullBackupWithFailures.FullTableBackupClientForTest.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/backup/TestFullBackupWithFailures.FullTableBackupClientForTest.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/backup/TestFullBackupWithFailures.FullTableBackupClientForTest.html
new file mode 100644
index 000..fe44ce0
--- /dev/null
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/backup/TestFullBackupWithFailures.FullTableBackupClientForTest.html
@@ -0,0 +1,278 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/**
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements. See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership. The ASF 
licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License. You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package org.apache.hadoop.hbase.backup;
+019
+020import static 
org.junit.Assert.assertFalse;
+021import static 
org.junit.Assert.assertTrue;
+022
+023import java.io.IOException;
+024import java.util.HashMap;
+025import java.util.List;
+026import java.util.Map;
+027import java.util.Random;
+028import java.util.Set;
+029
+030import org.apache.commons.logging.Log;
+031import 
org.apache.commons.logging.LogFactory;
+032import 
org.apache.hadoop.hbase.TableName;
+033import 
org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase;
+034import 
org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
+035import 
org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
+036import 
org.apache.hadoop.hbase.backup.impl.FullTableBackupClient;
+037import 
org.apache.hadoop.hbase.backup.impl.TableBackupClient;
+038import 
org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
+039import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
+040import 
org.apache.hadoop.hbase.client.Admin;
+041import 
org.apache.hadoop.hbase.client.Connection;
+042import 
org.apache.hadoop.hbase.testclassification.LargeTests;
+043import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+044import 
org.apache.hadoop.util.ToolRunner;
+045import org.junit.Test;
+046import 
org.junit.experimental.categories.Category;
+047
+048import 
com.google.common.annotations.VisibleForTesting;
+049
+050@Category(LargeTests.class)
+051public class TestFullBackupWithFailures 
extends TestBackupBase {
+052
+053  private static final Log LOG = 
LogFactory.getLog(TestFullBackupWithFailures.class);
+054
+055  static class 
FullTableBackupClientForTest extends FullTableBackupClient
+056  {
+057public static final String 
BACKUP_TEST_MODE_STAGE = "backup.test.mode.stage";
+058
+059public FullTableBackupClientForTest() 
{
+060}
+061
+062public 
FullTableBackupClientForTest(Connection conn, String backupId, BackupRequest 
request)
+063throws IOException {
+064  super(conn, backupId, request);
+065}
+066
+067@Override
+068public void execute() throws 
IOException
+069{
+070  // Get the stage ID to fail on
+071  try (Admin admin = 
conn.getAdmin();) {
+072// Begin BACKUP
+073beginBackup(backupManager, 
backupInfo);
+074failStageIf(0);
+075String savedStartCode = null;
+076boolean firstBackup = false;
+077// do snapshot for full table 
backup
+078savedStartCode = 
backupManager.readBackupStartCode();
+079firstBackup = savedStartCode == 
null || Long.parseLong(savedStartCode) == 0L;
+080if (firstBackup) {
+081  // This is our first backup. 
Let's put some marker to system table so that we can hold the logs
+082  // while we do the backup.
+083  
backupManager.writeBackupStartCode(0L);
+084}
+085failStageIf(1);
+086// We roll log here before we do 
the snapshot. It is possible there is duplicate data
+087// in the log that is already in 
the snapshot. But if we do it after the snapshot, we
+088// could have data loss.
+089// A better approach is to do the 
roll log on each RS in the same global procedure as
+090// the snapshot.
+091LOG.info("Execute roll log 
procedure for full backup ...");
+092
+093MapString, String props = 

[48/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
 
b/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
index 421c21d..a0758fc 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static enum BackupRestoreConstants.BackupCommand
+public static enum BackupRestoreConstants.BackupCommand
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumBackupRestoreConstants.BackupCommand
 
 
@@ -258,7 +258,7 @@ the order they are declared.

CREATE
-public static final BackupRestoreConstants.BackupCommand CREATE
+public static final BackupRestoreConstants.BackupCommand CREATE

@@ -267,7 +267,7 @@ the order they are declared.

CANCEL
-public static final BackupRestoreConstants.BackupCommand CANCEL
+public static final BackupRestoreConstants.BackupCommand CANCEL

@@ -276,7 +276,7 @@ the order they are declared.

DELETE
-public static final BackupRestoreConstants.BackupCommand DELETE
+public static final BackupRestoreConstants.BackupCommand DELETE

@@ -285,7 +285,7 @@ the order they are declared.

DESCRIBE
-public static final BackupRestoreConstants.BackupCommand DESCRIBE
+public static final BackupRestoreConstants.BackupCommand DESCRIBE

@@ -294,7 +294,7 @@ the order they are declared.

HISTORY
-public static final BackupRestoreConstants.BackupCommand HISTORY
+public static final BackupRestoreConstants.BackupCommand HISTORY

@@ -303,7 +303,7 @@ the order they are declared.

STATUS
-public static final BackupRestoreConstants.BackupCommand STATUS
+public static final BackupRestoreConstants.BackupCommand STATUS

@@ -312,7 +312,7 @@ the order they are declared.

CONVERT
-public static final BackupRestoreConstants.BackupCommand CONVERT
+public static final BackupRestoreConstants.BackupCommand CONVERT

@@ -321,7 +321,7 @@ the order they are declared.

MERGE
-public static final BackupRestoreConstants.BackupCommand MERGE
+public static final BackupRestoreConstants.BackupCommand MERGE

@@ -330,7 +330,7 @@ the order they are declared.

STOP
-public static final BackupRestoreConstants.BackupCommand STOP
+public static final BackupRestoreConstants.BackupCommand STOP

@@ -339,7 +339,7 @@ the order they are declared.

SHOW
-public static final BackupRestoreConstants.BackupCommand SHOW
+public static final BackupRestoreConstants.BackupCommand SHOW

@@ -348,7 +348,7 @@ the order they are declared.

HELP
-public static final BackupRestoreConstants.BackupCommand HELP
+public static final BackupRestoreConstants.BackupCommand HELP

@@ -357,7 +357,7 @@ the order they are declared.

PROGRESS
-public static final BackupRestoreConstants.BackupCommand PROGRESS
+public static final BackupRestoreConstants.BackupCommand PROGRESS

@@ -366,7 +366,7 @@ the order they are declared.

SET
-public static final BackupRestoreConstants.BackupCommand SET
+public static final BackupRestoreConstants.BackupCommand SET

@@ -375,7 +375,7 @@ the order they are declared.

SET_ADD
-public static final BackupRestoreConstants.BackupCommand SET_ADD
+public static final BackupRestoreConstants.BackupCommand SET_ADD

@@ -384,7 +384,7 @@ the order they are declared.

SET_REMOVE
-public static final BackupRestoreConstants.BackupCommand SET_REMOVE
+public static final BackupRestoreConstants.BackupCommand SET_REMOVE

@@ -393,7 +393,7 @@ the order they are declared.

SET_DELETE
-public static final BackupRestoreConstants.BackupCommand SET_DELETE
+public static final BackupRestoreConstants.BackupCommand SET_DELETE

@@ -402,7 +402,7 @@ the order they are declared.

SET_DESCRIBE
-public static final BackupRestoreConstants.BackupCommand SET_DESCRIBE
+public static final BackupRestoreConstants.BackupCommand SET_DESCRIBE

@@ -411,7 +411,7 @@ the order they are declared.

SET_LIST
-public static final BackupRestoreConstants.BackupCommand SET_LIST
+public static final BackupRestoreConstants.BackupCommand SET_LIST

@@ -428,7 +428,7 @@ the order they are declared.

values
-public static BackupRestoreConstants.BackupCommand[] values()
+public static BackupRestoreConstants.BackupCommand[] values()
Returns an array containing the constants of this enum type, in
the order they are declared.  This method may be used to iterate
over the constants as follows:
@@ -448,7 +448,7 @@ for (BackupRestoreConstants.BackupCommand c : BackupRestoreConstants.BackupComma

valueOf
-public 
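For reference, a minimal sketch (not part of the patch) of how the values()/valueOf() contract documented above is normally used; it assumes nothing beyond the standard java.lang.Enum behavior and the BackupRestoreConstants.BackupCommand import that appears in this diff.

// Sketch: iterate the BackupCommand constants in declaration order and
// look one up by name, using only the generated enum API shown above.
import org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;

public class BackupCommandEnumExample {
  public static void main(String[] args) {
    // values() returns the constants in the order they are declared (CREATE, CANCEL, ...).
    for (BackupCommand c : BackupCommand.values()) {
      System.out.println(c.name());
    }
    // valueOf() maps an exact constant name back to the enum value.
    BackupCommand create = BackupCommand.valueOf("CREATE");
    System.out.println("parsed: " + create);
  }
}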

[45/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
index 0948fd9..8342000 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":9,"i21":9,"i22":10,"i23":9,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":9,"i31":10,"i32":9,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":9,"i52":10,"i53":9,"i54":9,"i55":9,"i56":10,"i57":9,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":9,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":9,"i21":9,"i22":10,"i23":9,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":9,"i33":10,"i34":9,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":9,"i55":10,"i56":9,"i57":9,"i58":9,"i59":9,"i60":10,"i61":9,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":9,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -168,6 +168,22 @@ implements Closeable
 Field and Description

+private static byte[]
+ACTIVE_SESSION_COL
+
+private static byte[]
+ACTIVE_SESSION_NO
+
+private static byte[]
+ACTIVE_SESSION_ROW
+
+private static byte[]
+ACTIVE_SESSION_YES
+
 private static String
 BACKUP_INFO_PREFIX

@@ -457,12 +473,20 @@ implements Closeable

 private Put
+createPutForStartBackupSession()
+
+private Put
 createPutForStartCode(String startCode,
                       String rootPath)
 Creates Put operation to store start code to backup system table

-
+
+private Put
+createPutForStopBackupSession()
+
 private Put
 createPutForWriteRegionServerLogTimestamp(TableName table,
                       byte[] smap,
@@ -470,7 +494,7 @@ implements Closeable
 Creates Put to write RS last roll log timestamp map

-
+
 private List<Put>
 createPutsForAddWALFiles(List<String> files,
                       String backupId,
@@ -478,313 +502,325 @@ implements Closeable
 Creates put list for list of WAL files

-
+
 private Scan
 createScanForBackupHistory()
 Creates Scan operation to load backup history

-
+
 private Scan
 createScanForBackupSetList()
 Creates Scan operation to load backup set list

-
+
 (package private) static Scan
 createScanForBulkLoadedFiles(String backupId)

-
+
 private Scan
 createScanForGetWALs(String

[36/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
index cc40d21..2df0b04 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
@@ -61,8 +61,8 @@
 053 import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 054 import org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
 055 import org.apache.hadoop.hbase.backup.BackupType;
-056 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-057 import org.apache.hadoop.hbase.backup.util.BackupSet;
+056 import org.apache.hadoop.hbase.backup.util.BackupSet;
+057 import org.apache.hadoop.hbase.backup.util.BackupUtils;
 058 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 059 import org.apache.hadoop.hbase.client.Connection;
 060 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -122,670 +122,703 @@
 114
 115  public static abstract class Command extends Configured {
 116    CommandLine cmdline;
-117
+117    Connection conn;
 118    Command(Configuration conf) {
-119      super(conf);
-120    }
-121
-122    public void execute() throws IOException {
-123      if (cmdline.hasOption("h") || cmdline.hasOption("help")) {
-124        printUsage();
-125        throw new IOException(INCORRECT_USAGE);
-126      }
-127    }
-128
-129    protected abstract void printUsage();
-130  }
-131
-132  private BackupCommands() {
-133    throw new AssertionError("Instantiating utility class...");
-134  }
-135
-136  public static Command createCommand(Configuration conf, BackupCommand type, CommandLine cmdline) {
-137    Command cmd = null;
-138    switch (type) {
-139    case CREATE:
-140      cmd = new CreateCommand(conf, cmdline);
-141      break;
-142    case DESCRIBE:
-143      cmd = new DescribeCommand(conf, cmdline);
-144      break;
-145    case PROGRESS:
-146      cmd = new ProgressCommand(conf, cmdline);
-147      break;
-148    case DELETE:
-149      cmd = new DeleteCommand(conf, cmdline);
-150      break;
-151    case CANCEL:
-152      cmd = new CancelCommand(conf, cmdline);
-153      break;
-154    case HISTORY:
-155      cmd = new HistoryCommand(conf, cmdline);
-156      break;
-157    case SET:
-158      cmd = new BackupSetCommand(conf, cmdline);
-159      break;
-160    case HELP:
-161    default:
-162      cmd = new HelpCommand(conf, cmdline);
-163      break;
-164    }
-165    return cmd;
-166  }
-167
-168  static int numOfArgs(String[] args) {
-169    if (args == null) return 0;
-170    return args.length;
-171  }
-172
-173  public static class CreateCommand extends Command {
-174
-175    CreateCommand(Configuration conf, CommandLine cmdline) {
-176      super(conf);
-177      this.cmdline = cmdline;
-178    }
-179
-180    @Override
-181    public void execute() throws IOException {
-182      super.execute();
-183      if (cmdline == null || cmdline.getArgs() == null) {
-184        printUsage();
-185        throw new IOException(INCORRECT_USAGE);
-186      }
-187      String[] args = cmdline.getArgs();
-188      if (args.length !=3) {
-189        printUsage();
-190        throw new IOException(INCORRECT_USAGE);
-191      }
-192
-193      if (!BackupType.FULL.toString().equalsIgnoreCase(args[1])
-194          && !BackupType.INCREMENTAL.toString().equalsIgnoreCase(args[1])) {
-195        System.out.println("ERROR: invalid backup type: " + args[1]);
-196        printUsage();
-197        throw new IOException(INCORRECT_USAGE);
-198      }
-199      if (!verifyPath(args[2])) {
-200        System.out.println("ERROR: invalid backup destination: " + args[2]);
-201        printUsage();
-202        throw new IOException(INCORRECT_USAGE);
-203      }
-204
-205      String tables = null;
-206      Configuration conf = getConf() != null ? getConf() : HBaseConfiguration.create();
-207
-208      // Check if we have both: backup set and list of tables
-209      if (cmdline.hasOption(OPTION_TABLE) && cmdline.hasOption(OPTION_SET)) {
-210        System.out.println("ERROR: You can specify either backup set or list"
-211            + " of tables, but not both");
-212        printUsage();
-213        throw new IOException(INCORRECT_USAGE);
-214      }
-215
-216      // Check backup set
-217      String setName = null;
-218      if (cmdline.hasOption(OPTION_SET)) {
-219        setName = cmdline.getOptionValue(OPTION_SET);
-220        tables = getTablesForSet(setName, conf);
-221
-222        if (tables == null) {
-223          System.out.println("ERROR: Backup set '" + setName
-224
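The hunk above shows the Command base class (now carrying a Connection) and the createCommand(Configuration, BackupCommand, CommandLine) factory. Below is only a hedged usage sketch built on what that hunk shows; the option wiring is illustrative, not the real backup CLI, and it assumes commons-cli 1.3+ for DefaultParser.

// Sketch: drive the factory shown above from a parsed command line.
import java.io.IOException;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
import org.apache.hadoop.hbase.backup.impl.BackupCommands;

public class BackupCommandDriverSketch {
  public static void main(String[] args) throws IOException, ParseException {
    Configuration conf = HBaseConfiguration.create();

    // Illustrative options only; the real tool defines its own option constants.
    Options options = new Options();
    options.addOption("h", "help", false, "print usage");
    CommandLine cmdline = new DefaultParser().parse(options, args);

    // Factory from the diff: picks the Command subclass for the enum value.
    BackupCommands.Command cmd = BackupCommands.createCommand(conf, BackupCommand.HELP, cmdline);
    // execute() may throw IOException(INCORRECT_USAGE) on bad input, per the code above.
    cmd.execute();
  }
}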

[46/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
index f77c68a..1c530a4 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class BackupCommands.HelpCommand
+private static class BackupCommands.HelpCommand
 extends BackupCommands.Command
 
 
@@ -146,7 +146,7 @@ extends BackupCommands.Command
-cmdline
+cmdline, conn
 
 
 
@@ -190,6 +190,13 @@ extends 
+
+
+
+Methods inherited from classorg.apache.hadoop.hbase.backup.impl.BackupCommands.Command
+finish,
 requiresNoActiveSession
+
+
 
 
 
@@ -223,7 +230,7 @@ extends 
 
 HelpCommand
-HelpCommand(org.apache.hadoop.conf.Configurationconf,
+HelpCommand(org.apache.hadoop.conf.Configurationconf,
 org.apache.commons.cli.CommandLinecmdline)
 
 
@@ -241,7 +248,7 @@ extends 
 
 execute
-publicvoidexecute()
+publicvoidexecute()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Overrides:
@@ -257,7 +264,7 @@ extends 
 
 printUsage
-protectedvoidprintUsage()
+protectedvoidprintUsage()
 
 Specified by:
 printUsagein
 classBackupCommands.Command

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
index 65a491d..c799a4f 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class BackupCommands.HistoryCommand
+private static class BackupCommands.HistoryCommand
 extends BackupCommands.Command
 
 
@@ -157,7 +157,7 @@ extends BackupCommands.Command
-cmdline
+cmdline, conn
 
 
 
@@ -217,6 +217,13 @@ extends 
+
+
+
+Methods inherited from classorg.apache.hadoop.hbase.backup.impl.BackupCommands.Command
+finish,
 requiresNoActiveSession
+
+
 
 
 
@@ -250,7 +257,7 @@ extends 
 
 DEFAULT_HISTORY_LENGTH
-private static finalint DEFAULT_HISTORY_LENGTH
+private static finalint DEFAULT_HISTORY_LENGTH
 
 See Also:
 Constant
 Field Values
@@ -271,7 +278,7 @@ extends 
 
 HistoryCommand
-HistoryCommand(org.apache.hadoop.conf.Configurationconf,
+HistoryCommand(org.apache.hadoop.conf.Configurationconf,
org.apache.commons.cli.CommandLinecmdline)
 
 
@@ -289,7 +296,7 @@ extends 
 
 execute
-publicvoidexecute()
+publicvoidexecute()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Overrides:
@@ -305,7 +312,7 @@ extends 
 
 getBackupRootPath
-privateorg.apache.hadoop.fs.PathgetBackupRootPath()
+privateorg.apache.hadoop.fs.PathgetBackupRootPath()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -319,7 +326,7 @@ extends 
 
 getTableName
-privateTableNamegetTableName()
+privateTableNamegetTableName()
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -333,7 +340,7 @@ extends 
 
 getTableSetName
-privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetTableSetName()
+privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetTableSetName()
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -347,7 +354,7 @@ extends 
 
 parseHistoryLength
-privateintparseHistoryLength()
+privateintparseHistoryLength()
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
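The HistoryCommand listing above includes private helpers such as parseHistoryLength() and a DEFAULT_HISTORY_LENGTH constant. The following is a hypothetical sketch, not the real HistoryCommand code, of parsing an optional numeric history-length argument with a default; the option name "n" and the default value are assumptions for illustration only.

// Hypothetical sketch: optional numeric option with a fallback default.
import java.io.IOException;
import org.apache.commons.cli.CommandLine;

public class HistoryLengthSketch {
  private static final int DEFAULT_HISTORY_LENGTH = 10; // assumed default

  static int parseHistoryLength(CommandLine cmdline) throws IOException {
    String value = cmdline.getOptionValue("n");          // hypothetical option name
    if (value == null) {
      return DEFAULT_HISTORY_LENGTH;                     // nothing given: use the default
    }
    try {
      return Integer.parseInt(value);
    } catch (NumberFormatException e) {
      throw new IOException("Illegal history length value: " + value, e);
    }
  }
}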
@@ -361,7 +368,7 @@ extends 
 
 printUsage
-protectedvoidprintUsage()
+protectedvoidprintUsage()
 
 Specified by:
 printUsagein
 

[13/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
index f456b7b..a2510c5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
@@ -69,15 +69,15 @@
 061  requiredArguments = {
 062    @org.jamon.annotations.Argument(name = "master", type = "HMaster")},
 063  optionalArguments = {
-064    @org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"),
-065    @org.jamon.annotations.Argument(name = "frags", type = "Map<String,Integer>"),
-066    @org.jamon.annotations.Argument(name = "filter", type = "String"),
-067    @org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager"),
-068    @org.jamon.annotations.Argument(name = "servers", type = "List<ServerName>"),
-069    @org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager"),
-070    @org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean"),
+064    @org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean"),
+065    @org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"),
+066    @org.jamon.annotations.Argument(name = "frags", type = "Map<String,Integer>"),
+067    @org.jamon.annotations.Argument(name = "filter", type = "String"),
+068    @org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager"),
+069    @org.jamon.annotations.Argument(name = "servers", type = "List<ServerName>"),
+070    @org.jamon.annotations.Argument(name = "format", type = "String"),
 071    @org.jamon.annotations.Argument(name = "deadServers", type = "Set<ServerName>"),
-072    @org.jamon.annotations.Argument(name = "format", type = "String")})
+072    @org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager")})
 073public class MasterStatusTmpl
 074  extends org.jamon.AbstractTemplateProxy
 075{
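The generated ImplData code in the hunk that follows uses a setter/getter/__IsNotDefault triplet for every optional template argument, which is why reordering the annotations reshuffles so many members. Below is a minimal standalone mimic of that pattern, assuming nothing beyond what the generated code shows; the class name is illustrative, and "filter" is one of the real optional arguments listed above.

// Sketch of the Jamon ImplData optional-argument pattern: the setter records
// the value and flips an "__IsNotDefault" flag so the template can distinguish
// "explicitly set" from "left at default".
public class OptionalArgSketch {
  private String m_filter;
  private boolean m_filter__IsNotDefault;

  public void setFilter(String filter) {
    m_filter = filter;
    m_filter__IsNotDefault = true;   // remember that the caller provided a value
  }

  public String getFilter() {
    return m_filter;
  }

  public boolean getFilter__IsNotDefault() {
    return m_filter__IsNotDefault;
  }

  public static void main(String[] args) {
    OptionalArgSketch data = new OptionalArgSketch();
    System.out.println(data.getFilter__IsNotDefault()); // false: still the default
    data.setFilter(".*");
    System.out.println(data.getFilter__IsNotDefault()); // true: explicitly set
  }
}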
@@ -118,125 +118,125 @@
 110  return m_master;
 111}
 112private HMaster m_master;
-113// 22, 1
-114public void 
setMetaLocation(ServerName metaLocation)
+113// 25, 1
+114public void 
setCatalogJanitorEnabled(boolean catalogJanitorEnabled)
 115{
-116  // 22, 1
-117  m_metaLocation = metaLocation;
-118  m_metaLocation__IsNotDefault = 
true;
+116  // 25, 1
+117  m_catalogJanitorEnabled = 
catalogJanitorEnabled;
+118  
m_catalogJanitorEnabled__IsNotDefault = true;
 119}
-120public ServerName getMetaLocation()
+120public boolean 
getCatalogJanitorEnabled()
 121{
-122  return m_metaLocation;
+122  return m_catalogJanitorEnabled;
 123}
-124private ServerName m_metaLocation;
-125public boolean 
getMetaLocation__IsNotDefault()
+124private boolean 
m_catalogJanitorEnabled;
+125public boolean 
getCatalogJanitorEnabled__IsNotDefault()
 126{
-127  return 
m_metaLocation__IsNotDefault;
+127  return 
m_catalogJanitorEnabled__IsNotDefault;
 128}
-129private boolean 
m_metaLocation__IsNotDefault;
-130// 21, 1
-131public void 
setFrags(MapString,Integer frags)
+129private boolean 
m_catalogJanitorEnabled__IsNotDefault;
+130// 22, 1
+131public void 
setMetaLocation(ServerName metaLocation)
 132{
-133  // 21, 1
-134  m_frags = frags;
-135  m_frags__IsNotDefault = true;
+133  // 22, 1
+134  m_metaLocation = metaLocation;
+135  m_metaLocation__IsNotDefault = 
true;
 136}
-137public MapString,Integer 
getFrags()
+137public ServerName getMetaLocation()
 138{
-139  return m_frags;
+139  return m_metaLocation;
 140}
-141private MapString,Integer 
m_frags;
-142public boolean 
getFrags__IsNotDefault()
+141private ServerName m_metaLocation;
+142public boolean 
getMetaLocation__IsNotDefault()
 143{
-144  return m_frags__IsNotDefault;
+144  return 
m_metaLocation__IsNotDefault;
 145}
-146private boolean 
m_frags__IsNotDefault;
-147// 26, 1
-148public void setFilter(String 
filter)
+146private boolean 
m_metaLocation__IsNotDefault;
+147// 21, 1
+148public void 
setFrags(MapString,Integer frags)
 149{
-150  // 26, 1
-151  m_filter = filter;
-152  m_filter__IsNotDefault = true;
+150  // 21, 1
+151  m_frags = frags;
+152  m_frags__IsNotDefault = true;
 153}
-154public String getFilter()
+154public MapString,Integer 
getFrags()
 155{
-156  return m_filter;
+156  return m_frags;
 157}
-158private String m_filter;
-159public boolean