[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-08-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/346adc37/devapidocs/src-html/org/apache/hadoop/hbase/client/Get.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/Get.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/Get.html
index 438db17..48420d2 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/Get.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/Get.html
@@ -275,297 +275,321 @@
 267  /**
 268   * Get all available versions.
 269   * @return this for invocation chaining
-270   */
-271  public Get setMaxVersions() {
-272    this.maxVersions = Integer.MAX_VALUE;
-273    return this;
-274  }
-275
-276  /**
-277   * Get up to the specified number of versions of each column.
-278   * @param maxVersions maximum versions for each column
-279   * @throws IOException if invalid number of versions
-280   * @return this for invocation chaining
-281   */
-282  public Get setMaxVersions(int maxVersions) throws IOException {
-283    if (maxVersions <= 0) {
-284      throw new IOException("maxVersions must be positive");
-285    }
-286    this.maxVersions = maxVersions;
-287    return this;
-288  }
-289
-290  public Get setLoadColumnFamiliesOnDemand(boolean value) {
-291    return (Get) super.setLoadColumnFamiliesOnDemand(value);
-292  }
-293
-294  /**
-295   * Set the maximum number of values to return per row per Column Family
-296   * @param limit the maximum number of values returned / row / CF
-297   * @return this for invocation chaining
-298   */
-299  public Get setMaxResultsPerColumnFamily(int limit) {
-300    this.storeLimit = limit;
-301    return this;
-302  }
-303
-304  /**
-305   * Set offset for the row per Column Family. This offset is only within a particular row/CF
-306   * combination. It gets reset back to zero when we move to the next row or CF.
-307   * @param offset is the number of kvs that will be skipped.
-308   * @return this for invocation chaining
-309   */
-310  public Get setRowOffsetPerColumnFamily(int offset) {
-311    this.storeOffset = offset;
-312    return this;
-313  }
-314
-315  @Override
-316  public Get setFilter(Filter filter) {
-317    super.setFilter(filter);
-318    return this;
-319  }
-320
-321  /* Accessors */
-322
-323  /**
-324   * Set whether blocks should be cached for this Get.
-325   * <p>
-326   * This is true by default.  When true, default settings of the table and
-327   * family are used (this will never override caching blocks if the block
-328   * cache is disabled for that family or entirely).
-329   *
-330   * @param cacheBlocks if false, default settings are overridden and blocks
-331   * will not be cached
-332   */
-333  public Get setCacheBlocks(boolean cacheBlocks) {
-334    this.cacheBlocks = cacheBlocks;
-335    return this;
-336  }
-337
-338  /**
-339   * Get whether blocks should be cached for this Get.
-340   * @return true if default caching should be used, false if blocks should not
-341   * be cached
-342   */
-343  public boolean getCacheBlocks() {
-344    return cacheBlocks;
-345  }
+270   * @deprecated It is easy to misunderstand with column family's max versions, so use
+271   *             {@link #readAllVersions()} instead.
+272   */
+273  @Deprecated
+274  public Get setMaxVersions() {
+275    return readAllVersions();
+276  }
+277
+278  /**
+279   * Get up to the specified number of versions of each column.
+280   * @param maxVersions maximum versions for each column
+281   * @throws IOException if invalid number of versions
+282   * @return this for invocation chaining
+283   * @deprecated It is easy to misunderstand with column family's max versions, so use
+284   *             {@link #readVersions(int)} instead.
+285   */
+286  @Deprecated
+287  public Get setMaxVersions(int maxVersions) throws IOException {
+288    return readVersions(maxVersions);
+289  }
+290
+291  /**
+292   * Get all available versions.
+293   * @return this for invocation chaining
+294   */
+295  public Get readAllVersions() {
+296    this.maxVersions = Integer.MAX_VALUE;
+297    return this;
+298  }
+299
+300  /**
+301   * Get up to the specified number of versions of each column.
+302   * @param versions specified number of versions for each column
+303   * @throws IOException if invalid number of versions
+304   * @return this for invocation chaining
+305   */
+306  public Get readVersions(int versions) throws IOException {
+307    if (versions <= 0) {
+308      throw new IOException("versions must be positive");
+309    }
+310    this.maxVersions = versions;
+311    return this;
+312  }
+313
+314  public Get setLoadColumnFamiliesOnDemand(boolean value) {
+315    return (Get) super.setLoadColumnFamiliesOnDemand(value);
+316  }
+317
+318  /**
+319   * Set the maximum number of values to return per row per Column Family
+320   * @param limit the maximum number of values returned / row / 

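The hunk above deprecates Get.setMaxVersions() in favour of readAllVersions()/readVersions(int). A minimal caller-side sketch of the replacement API; the table handle and row key are illustrative, not taken from the diff:

    // Read up to three versions of every requested column in one row.
    Get get = new Get(Bytes.toBytes("row-1"));
    get.readVersions(3);              // replaces the deprecated get.setMaxVersions(3)
    Result result = table.get(get);   // 'table' is an org.apache.hadoop.hbase.client.Table
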
[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1837997e/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.html
index 79c41ff..0bf41f8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.html
@@ -91,7 +91,7 @@
 083
 084    String suffix = "Count";
 085
-086    regionPutKey = regionNamePrefix + MetricsRegionServerSource.MUTATE_KEY + suffix;
+086    regionPutKey = regionNamePrefix + MetricsRegionServerSource.PUT_KEY + suffix;
 087    regionPut = registry.getCounter(regionPutKey, 0L);
 088
 089    regionDeleteKey = regionNamePrefix + MetricsRegionServerSource.DELETE_KEY + suffix;



[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a2b2dd19/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
index c9a18a3..c80f6d8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
@@ -2492,2617 +2492,2627 @@
 2484  return;
 2485}
 2486  }
-2487      errors.reportError(ERROR_CODE.LINGERING_SPLIT_PARENT, "Region "
-2488          + descriptiveName + " is a split parent in META, in HDFS, "
-2489          + "and not deployed on any region server. This could be transient, "
-2490          + "consider to run the catalog janitor first!");
-2491      if (shouldFixSplitParents()) {
-2492        setShouldRerun();
-2493        resetSplitParent(hbi);
-2494      }
-2495    } else if (inMeta && !inHdfs && !isDeployed) {
-2496      errors.reportError(ERROR_CODE.NOT_IN_HDFS_OR_DEPLOYED, "Region "
-2497          + descriptiveName + " found in META, but not in HDFS "
-2498          + "or deployed on any region server.");
-2499      if (shouldFixMeta()) {
-2500        deleteMetaRegion(hbi);
-2501      }
-2502    } else if (inMeta && !inHdfs && isDeployed) {
-2503      errors.reportError(ERROR_CODE.NOT_IN_HDFS, "Region " + descriptiveName
-2504          + " found in META, but not in HDFS, " +
-2505          "and deployed on " + Joiner.on(", ").join(hbi.deployedOn));
-2506      // We treat HDFS as ground truth.  Any information in meta is transient
-2507      // and equivalent data can be regenerated.  So, lets unassign and remove
-2508      // these problems from META.
-2509      if (shouldFixAssignments()) {
-2510        errors.print("Trying to fix unassigned region...");
-2511        undeployRegions(hbi);
-2512      }
-2513      if (shouldFixMeta()) {
-2514        // wait for it to complete
-2515        deleteMetaRegion(hbi);
-2516      }
-2517    } else if (inMeta && inHdfs && !isDeployed && shouldBeDeployed) {
-2518      errors.reportError(ERROR_CODE.NOT_DEPLOYED, "Region " + descriptiveName
-2519          + " not deployed on any region server.");
-2520      tryAssignmentRepair(hbi, "Trying to fix unassigned region...");
-2521    } else if (inMeta && inHdfs && isDeployed && !shouldBeDeployed) {
-2522      errors.reportError(ERROR_CODE.SHOULD_NOT_BE_DEPLOYED,
-2523          "Region " + descriptiveName + " should not be deployed according " +
-2524          "to META, but is deployed on " + Joiner.on(", ").join(hbi.deployedOn));
-2525      if (shouldFixAssignments()) {
-2526        errors.print("Trying to close the region " + descriptiveName);
-2527        setShouldRerun();
-2528        HBaseFsckRepair.fixMultiAssignment(connection, hbi.metaEntry, hbi.deployedOn);
-2529      }
-2530    } else if (inMeta && inHdfs && isMultiplyDeployed) {
-2531      errors.reportError(ERROR_CODE.MULTI_DEPLOYED, "Region " + descriptiveName
-2532          + " is listed in hbase:meta on region server " + hbi.metaEntry.regionServer
-2533          + " but is multiply assigned to region servers " +
-2534          Joiner.on(", ").join(hbi.deployedOn));
-2535      // If we are trying to fix the errors
-2536      if (shouldFixAssignments()) {
-2537        errors.print("Trying to fix assignment error...");
-2538        setShouldRerun();
-2539        HBaseFsckRepair.fixMultiAssignment(connection, hbi.metaEntry, hbi.deployedOn);
-2540      }
-2541    } else if (inMeta && inHdfs && isDeployed && !deploymentMatchesMeta) {
-2542      errors.reportError(ERROR_CODE.SERVER_DOES_NOT_MATCH_META, "Region "
-2543          + descriptiveName + " listed in hbase:meta on region server " +
-2544          hbi.metaEntry.regionServer + " but found on region server " +
-2545          hbi.deployedOn.get(0));
-2546      // If we are trying to fix the errors
-2547      if (shouldFixAssignments()) {
-2548        errors.print("Trying to fix assignment error...");
-2549        setShouldRerun();
-2550        HBaseFsckRepair.fixMultiAssignment(connection, hbi.metaEntry, hbi.deployedOn);
-2551        HBaseFsckRepair.waitUntilAssigned(admin, hbi.getHdfsHRI());
-2552      }
-2553    } else {
-2554      errors.reportError(ERROR_CODE.UNKNOWN, "Region " + descriptiveName +
-2555          " is in an unforeseen state:" +
-2556          " inMeta=" + inMeta +
-2557          " inHdfs=" + inHdfs +
-2558          " isDeployed=" + isDeployed +
-2559          " isMultiplyDeployed=" + isMultiplyDeployed +
-2560          " deploymentMatchesMeta=" + deploymentMatchesMeta +
-2561          " shouldBeDeployed=" + shouldBeDeployed);
-2562    }
-2563  }
-2564
-2565  /**
-2566   * Checks tables integrity. Goes over all regions and scans the tables.
-2567   * 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21766f4a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
index 0865b8f..596b800 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
@@ -30,1795 +30,1790 @@
 022import java.util.ArrayList;
 023import java.util.Collection;
 024import java.util.Collections;
-025import java.util.Comparator;
-026import java.util.HashMap;
-027import java.util.HashSet;
-028import java.util.List;
-029import java.util.Map;
-030import java.util.Set;
-031import java.util.concurrent.CopyOnWriteArrayList;
-032import java.util.concurrent.Future;
-033import java.util.concurrent.TimeUnit;
-034import java.util.concurrent.atomic.AtomicBoolean;
-035import java.util.concurrent.locks.Condition;
-036import java.util.concurrent.locks.ReentrantLock;
-037import java.util.stream.Collectors;
-038
-039import org.apache.commons.logging.Log;
-040import org.apache.commons.logging.LogFactory;
-041import org.apache.hadoop.conf.Configuration;
-042import org.apache.hadoop.hbase.HBaseIOException;
-043import org.apache.hadoop.hbase.HConstants;
-044import org.apache.hadoop.hbase.HRegionInfo;
-045import org.apache.hadoop.hbase.PleaseHoldException;
-046import org.apache.hadoop.hbase.RegionException;
-047import org.apache.hadoop.hbase.RegionStateListener;
-048import org.apache.hadoop.hbase.ServerName;
-049import org.apache.hadoop.hbase.TableName;
-050import org.apache.hadoop.hbase.classification.InterfaceAudience;
-051import org.apache.hadoop.hbase.client.TableState;
-052import org.apache.hadoop.hbase.exceptions.UnexpectedStateException;
-053import org.apache.hadoop.hbase.master.balancer.FavoredStochasticBalancer;
-054import org.apache.hadoop.hbase.favored.FavoredNodesManager;
-055import org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
-056import org.apache.hadoop.hbase.master.AssignmentListener;
-057import org.apache.hadoop.hbase.master.LoadBalancer;
-058import org.apache.hadoop.hbase.master.MasterServices;
-059import org.apache.hadoop.hbase.master.MetricsAssignmentManager;
-060import org.apache.hadoop.hbase.master.NoSuchProcedureException;
-061import org.apache.hadoop.hbase.master.RegionPlan;
-062import org.apache.hadoop.hbase.master.RegionState;
-063import org.apache.hadoop.hbase.master.RegionState.State;
-064import org.apache.hadoop.hbase.master.ServerListener;
-065import org.apache.hadoop.hbase.master.TableStateManager;
-066import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
-067import org.apache.hadoop.hbase.master.assignment.RegionStates.ServerState;
-068import org.apache.hadoop.hbase.master.assignment.RegionStates.ServerStateNode;
-069// TODO: why are they here?
-070import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
-071import org.apache.hadoop.hbase.master.normalizer.RegionNormalizer;
-072import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-073import org.apache.hadoop.hbase.master.procedure.MasterProcedureScheduler;
-074import org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait;
-075import org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure;
-076import org.apache.hadoop.hbase.procedure2.Procedure;
-077import org.apache.hadoop.hbase.procedure2.ProcedureEvent;
-078import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-079import org.apache.hadoop.hbase.procedure2.ProcedureInMemoryChore;
-080import org.apache.hadoop.hbase.procedure2.util.StringUtils;
-081import org.apache.hadoop.hbase.quotas.QuotaExceededException;
-082import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-083import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition;
-084import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
-085import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
-086import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse;
-087import org.apache.hadoop.hbase.util.Bytes;
-088import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-089import org.apache.hadoop.hbase.util.Pair;
-090import org.apache.hadoop.hbase.util.Threads;
-091import org.apache.hadoop.hbase.util.VersionInfo;
+025import java.util.HashMap;
+026import java.util.HashSet;
+027import java.util.List;
+028import java.util.Map;

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2d5075d7/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
new file mode 100644
index 000..904b921
--- /dev/null
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.FailedProcedure.html
@@ -0,0 +1,2074 @@
+Source code
+001/**
+002 * Licensed to the Apache Software Foundation (ASF) under one
+003 * or more contributor license agreements.  See the NOTICE file
+004 * distributed with this work for additional information
+005 * regarding copyright ownership.  The ASF licenses this file
+006 * to you under the Apache License, Version 2.0 (the
+007 * "License"); you may not use this file except in compliance
+008 * with the License.  You may obtain a copy of the License at
+009 *
+010 *     http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or agreed to in writing, software
+013 * distributed under the License is distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+015 * See the License for the specific language governing permissions and
+016 * limitations under the License.
+017 */
+018
+019package org.apache.hadoop.hbase.procedure2;
+020
+021import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+022import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+023
+024import java.io.IOException;
+025import java.io.InputStream;
+026import java.io.OutputStream;
+027import java.util.ArrayList;
+028import java.util.Arrays;
+029import java.util.HashSet;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Map;
+033import java.util.Objects;
+034import java.util.Set;
+035import java.util.concurrent.atomic.AtomicBoolean;
+036import java.util.concurrent.atomic.AtomicInteger;
+037import java.util.concurrent.atomic.AtomicLong;
+038import java.util.stream.Collectors;
+039import java.util.stream.Stream;
+040import java.util.concurrent.ConcurrentHashMap;
+041import java.util.concurrent.CopyOnWriteArrayList;
+042import java.util.concurrent.DelayQueue;
+043import java.util.concurrent.TimeUnit;
+044
+045import org.apache.commons.logging.Log;
+046import org.apache.commons.logging.LogFactory;
+047import org.apache.hadoop.conf.Configuration;
+048import org.apache.hadoop.hbase.HConstants;
+049import org.apache.hadoop.hbase.classification.InterfaceAudience;
+050import org.apache.hadoop.hbase.classification.InterfaceStability;
+051import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
+052import org.apache.hadoop.hbase.procedure2.Procedure.LockState;
+053import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
+054import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
+055import org.apache.hadoop.hbase.procedure2.util.DelayedUtil;
+056import org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedWithTimeout;
+057import org.apache.hadoop.hbase.procedure2.util.StringUtils;
+058import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
+059import org.apache.hadoop.hbase.security.User;
+060import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+061import org.apache.hadoop.hbase.util.NonceKey;
+062import org.apache.hadoop.hbase.util.Threads;
+063
+064/**
+065 * Thread Pool that executes the submitted procedures.
+066 * The executor has a ProcedureStore associated.
+067 * Each operation is logged and on restart the pending procedures are resumed.
+068 *
+069 * Unless the Procedure code throws an error (e.g. invalid user input)
+070 * the procedure will complete (at some point in time), On restart the pending
+071 * procedures are resumed and the once failed will be rolledback.
+072 *
+073 * The user can add procedures to the executor via submitProcedure(proc)
+074 * check for the finished state via isFinished(procId)
+075 * and get the result via getResult(procId)
+076 */
+077@InterfaceAudience.Private
+078@InterfaceStability.Evolving
+079public class ProcedureExecutor<TEnvironment> {
+080  private static final Log LOG = LogFactory.getLog(ProcedureExecutor.class);
+081
+082  public static final String CHECK_OWNER_SET_CONF_KEY = "hbase.procedure.check.owner.set";
+083  private static final boolean DEFAULT_CHECK_OWNER_SET = false;
+084
+085  public static final String WORKER_KEEP_ALIVE_TIME_CONF_KEY =
+086      "hbase.procedure.worker.keep.alive.time.msec";
+087  private static final long DEFAULT_WORKER_KEEP_ALIVE_TIME = Long.MAX_VALUE;
+088
+089  Testing testing = null;
+090  

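The class Javadoc above spells out the intended call pattern for ProcedureExecutor. A rough submit-and-poll sketch; the executor instance ('procExec') and MyProcedure are stand-ins, and only the three calls named in the Javadoc come from the source:

    long procId = procExec.submitProcedure(new MyProcedure());
    while (!procExec.isFinished(procId)) {
      Thread.sleep(100);   // polling keeps the sketch short; real callers may block smarter
    }
    // The outcome is then available via procExec.getResult(procId).
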
[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0383a9c2/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactSplit.Rejection.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactSplit.Rejection.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactSplit.Rejection.html
index 06b7a03..7dabb5e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactSplit.Rejection.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/CompactSplit.Rejection.html
@@ -216,519 +216,505 @@
 208    return queueLists.toString();
 209  }
 210
-211  public synchronized void requestRegionsMerge(final Region a,
-212      final Region b, final boolean forcible, long masterSystemTime, User user) {
-213    try {
-214      mergePool.execute(new RegionMergeRequest(a, b, this.server, forcible, masterSystemTime, user));
-215      if (LOG.isDebugEnabled()) {
-216        LOG.debug("Region merge requested for " + a + "," + b + ", forcible="
-217            + forcible + ".  " + this);
+211  public synchronized boolean requestSplit(final Region r) {
+212    // don't split regions that are blocking
+213    if (shouldSplitRegion() && ((HRegion)r).getCompactPriority() >= Store.PRIORITY_USER) {
+214      byte[] midKey = ((HRegion)r).checkSplit();
+215      if (midKey != null) {
+216        requestSplit(r, midKey);
+217        return true;
 218      }
-219    } catch (RejectedExecutionException ree) {
-220      LOG.warn("Could not execute merge for " + a + "," + b + ", forcible="
-221          + forcible, ree);
-222    }
-223  }
-224
-225  public synchronized boolean requestSplit(final Region r) {
-226    // don't split regions that are blocking
-227    if (shouldSplitRegion() && ((HRegion)r).getCompactPriority() >= Store.PRIORITY_USER) {
-228      byte[] midKey = ((HRegion)r).checkSplit();
-229      if (midKey != null) {
-230        requestSplit(r, midKey);
-231        return true;
-232      }
-233    }
-234    return false;
-235  }
-236
-237  public synchronized void requestSplit(final Region r, byte[] midKey) {
-238    requestSplit(r, midKey, null);
-239  }
-240
-241  /*
-242   * The User parameter allows the split thread to assume the correct user identity
-243   */
-244  public synchronized void requestSplit(final Region r, byte[] midKey, User user) {
-245    if (midKey == null) {
-246      LOG.debug("Region " + r.getRegionInfo().getRegionNameAsString() +
-247        " not splittable because midkey=null");
-248      if (((HRegion)r).shouldForceSplit()) {
-249        ((HRegion)r).clearSplit();
-250      }
-251      return;
-252    }
-253    try {
-254      this.splits.execute(new SplitRequest(r, midKey, this.server, user));
-255      if (LOG.isDebugEnabled()) {
-256        LOG.debug("Splitting " + r + ", " + this);
-257      }
-258    } catch (RejectedExecutionException ree) {
-259      LOG.info("Could not execute split for " + r, ree);
-260    }
-261  }
-262
-263  @Override
-264  public synchronized List<CompactionRequest> requestCompaction(final Region r, final String why)
-265      throws IOException {
-266    return requestCompaction(r, why, null);
-267  }
-268
-269  @Override
-270  public synchronized List<CompactionRequest> requestCompaction(final Region r, final String why,
-271      List<Pair<CompactionRequest, Store>> requests) throws IOException {
-272    return requestCompaction(r, why, Store.NO_PRIORITY, requests, null);
-273  }
-274
-275  @Override
-276  public synchronized CompactionRequest requestCompaction(final Region r, final Store s,
-277      final String why, CompactionRequest request) throws IOException {
-278    return requestCompaction(r, s, why, Store.NO_PRIORITY, request, null);
-279  }
-280
-281  @Override
-282  public synchronized List<CompactionRequest> requestCompaction(final Region r, final String why,
-283      int p, List<Pair<CompactionRequest, Store>> requests, User user) throws IOException {
-284    return requestCompactionInternal(r, why, p, requests, true, user);
-285  }
-286
-287  private List<CompactionRequest> requestCompactionInternal(final Region r, final String why,
-288      int p, List<Pair<CompactionRequest, Store>> requests, boolean selectNow, User user)
-289      throws IOException {
-290    // not a special compaction request, so make our own list
-291    List<CompactionRequest> ret = null;
-292    if (requests == null) {
-293      ret = selectNow ? new ArrayList<CompactionRequest>(r.getStores().size()) : null;
-294      for (Store s : r.getStores()) {
-295        CompactionRequest cr = requestCompactionInternal(r, s, why, p, null, selectNow, user);
-296        if (selectNow) ret.add(cr);
-297      }
-298    } else {
-299      Preconditions.checkArgument(selectNow); // only system requests have selectNow == false
-300      ret = new ArrayList<CompactionRequest>(requests.size());
-301      for 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f391bcef/devapidocs/org/apache/hadoop/hbase/ipc/HBaseRpcController.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/HBaseRpcController.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/HBaseRpcController.html
index 55068c6..443baa7 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/HBaseRpcController.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/HBaseRpcController.html
@@ -75,13 +75,13 @@ var activeTableTab = "activeTableTab";
 Summary: Nested | Field | Constr | Method
 Detail:  Field | Constr | Method
 (only the link targets of the "Field" entries changed)
@@ -141,25 +141,6 @@
-Field Summary
-
-Modifier and Type    Field and Description
-static int           PRIORITY_UNSET
@@ -263,27 +244,6 @@
-Field Detail
-
-PRIORITY_UNSET
-static final int PRIORITY_UNSET
-
-See Also:
-Constant Field Values
@@ -296,7 +256,7 @@
 setCellScanner
-void setCellScanner(CellScanner cellScanner)
+void setCellScanner(CellScanner cellScanner)
 Only used to send cells to rpc server, the returned cells should be set by
 setDone(CellScanner).
@@ -307,7 +267,7 @@
 setPriority
-void setPriority(int priority)
+void setPriority(int priority)
 Parameters:
 priority - Priority for this request; should fall roughly in the range
@@ -321,7 +281,7 @@
 setPriority
-void setPriority(TableName tn)
+void setPriority(TableName tn)
 Parameters:
 tn - Set priority based off the table we are going against.
@@ -334,7 +294,7 @@
 getPriority
-int getPriority()
+int getPriority()
 Returns:
 The priority of this request
@@ -347,7 +307,7 @@
 getCallTimeout
-int getCallTimeout()
+int getCallTimeout()
@@ -356,7 +316,7 @@
 setCallTimeout
-void setCallTimeout(int callTimeout)
+void setCallTimeout(int callTimeout)
@@ -365,7 +325,7 @@
 hasCallTimeout
-boolean hasCallTimeout()
+boolean hasCallTimeout()
@@ -374,7 +334,7 @@
 setFailed
-void setFailed(IOException e)
+void setFailed(IOException e)
 Set failed with an exception to pass on. For use in async rpc clients
 Parameters:
@@ -388,7 +348,7 @@
 getFailed
-IOException getFailed()
+IOException getFailed()
 Return the failed exception, null if not failed.
@@ -398,7 +358,7 @@
 setDone
-void setDone(CellScanner cellScanner)
+void setDone(CellScanner cellScanner)
 IMPORTANT: always call this method if the call finished without any exception to tell
 the HBaseRpcController that we are done.
@@ -409,7 +369,7 @@
 notifyOnCancel
-void notifyOnCancel(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<Object> callback)
+void notifyOnCancel(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<Object> callback)
 A little different from the basic RpcController:
 You can register multiple callbacks to an HBaseRpcController.
@@ -428,7 +388,7 @@
 notifyOnCancel
-void notifyOnCancel(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<Object> callback,
[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/KeyValueUtil.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValueUtil.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValueUtil.html
index 4bd895c..e9cc458 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValueUtil.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValueUtil.html
@@ -45,8 +45,8 @@
 037import org.apache.hadoop.io.IOUtils;
 038import org.apache.hadoop.io.WritableUtils;
 039
-040import com.google.common.base.Function;
-041import com.google.common.collect.Lists;
+040import org.apache.hadoop.hbase.shaded.com.google.common.base.Function;
+041import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 042
 043/**
 044 * static convenience methods for dealing with KeyValues and collections of KeyValues

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
index f0a063c..93cc8f3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
@@ -75,7 +75,7 @@
 067import org.apache.hadoop.hbase.util.Pair;
 068import org.apache.hadoop.hbase.util.PairOfSameType;
 069
-070import com.google.common.annotations.VisibleForTesting;
+070import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 071
 072import edu.umd.cs.findbugs.annotations.NonNull;
 073import edu.umd.cs.findbugs.annotations.Nullable;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
index f0a063c..93cc8f3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
@@ -75,7 +75,7 @@
 067import org.apache.hadoop.hbase.util.Pair;
 068import org.apache.hadoop.hbase.util.PairOfSameType;
 069
-070import com.google.common.annotations.VisibleForTesting;
+070import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 071
 072import edu.umd.cs.findbugs.annotations.NonNull;
 073import edu.umd.cs.findbugs.annotations.Nullable;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
index f0a063c..93cc8f3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
@@ -75,7 +75,7 @@
 067import org.apache.hadoop.hbase.util.Pair;
 068import org.apache.hadoop.hbase.util.PairOfSameType;
 069
-070import com.google.common.annotations.VisibleForTesting;
+070import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 071
 072import edu.umd.cs.findbugs.annotations.NonNull;
 073import edu.umd.cs.findbugs.annotations.Nullable;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
index f0a063c..93cc8f3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
@@ -75,7 +75,7 @@
 067import org.apache.hadoop.hbase.util.Pair;
 068import org.apache.hadoop.hbase.util.PairOfSameType;
 069
-070import com.google.common.annotations.VisibleForTesting;
+070import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 071
 072import edu.umd.cs.findbugs.annotations.NonNull;
 073import edu.umd.cs.findbugs.annotations.Nullable;

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9eba7fcf/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.Monitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.Monitor.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.Monitor.html
index e1fbce4..873e17f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.Monitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.Monitor.html
@@ -1089,497 +1089,498 @@
 1081        }
 1082      }
 1083      Map<String, AtomicLong> actualReadTableLatency = regionSink.getReadLatencyMap();
-1084      for (String tableName : this.configuredReadTableTimeouts.keySet()) {
-1085        if (actualReadTableLatency.containsKey(tableName)) {
-1086          Long actual = actualReadTableLatency.get(tableName).longValue();
-1087          Long configured = this.configuredReadTableTimeouts.get(tableName);
-1088          LOG.info("Read operation for " + tableName + " took " + actual +
-1089            " ms. The configured read timeout was " + configured + " ms.");
-1090          if (actual > configured) {
-1091            LOG.error("Read operation for " + tableName + " exceeded the configured read timeout.");
-1092          }
-1093        } else {
-1094          LOG.error("Read operation for " + tableName + " failed!");
-1095        }
-1096      }
-1097      if (this.writeSniffing) {
-1098        String writeTableStringName = this.writeTableName.getNameAsString();
-1099        long actualWriteLatency = regionSink.getWriteLatency().longValue();
-1100        LOG.info("Write operation for " + writeTableStringName + " took " + actualWriteLatency + " ms. The configured write timeout was " +
-1101          this.configuredWriteTableTimeout + " ms.");
-1102        // Check that the writeTable write operation latency does not exceed the configured timeout.
-1103        if (actualWriteLatency > this.configuredWriteTableTimeout) {
-1104          LOG.error("Write operation for " + writeTableStringName + " exceeded the configured write timeout.");
-1105        }
-1106      }
-1107    } catch (Exception e) {
-1108      LOG.error("Run regionMonitor failed", e);
-1109      this.errorCode = ERROR_EXIT_CODE;
-1110    }
-1111  }
-1112  this.done = true;
-1113}
-1114
-1115private String[] generateMonitorTables(String[] monitorTargets) throws IOException {
-1116  String[] returnTables = null;
-1117
-1118  if (this.useRegExp) {
-1119    Pattern pattern = null;
-1120    HTableDescriptor[] tds = null;
-1121    Set<String> tmpTables = new TreeSet<>();
-1122    try {
-1123      if (LOG.isDebugEnabled()) {
-1124        LOG.debug(String.format("reading list of tables"));
-1125      }
-1126      tds = this.admin.listTables(pattern);
-1127      if (tds == null) {
-1128        tds = new HTableDescriptor[0];
-1129      }
-1130      for (String monitorTarget : monitorTargets) {
-1131        pattern = Pattern.compile(monitorTarget);
-1132        for (HTableDescriptor td : tds) {
-1133          if (pattern.matcher(td.getNameAsString()).matches()) {
-1134            tmpTables.add(td.getNameAsString());
-1135          }
-1136        }
-1137      }
-1138    } catch (IOException e) {
-1139      LOG.error("Communicate with admin failed", e);
-1140      throw e;
-1141    }
-1142
-1143    if (tmpTables.size() > 0) {
-1144      returnTables = tmpTables.toArray(new String[tmpTables.size()]);
-1145    } else {
-1146      String msg = "No HTable found, tablePattern:" + Arrays.toString(monitorTargets);
-1147      LOG.error(msg);
-1148      this.errorCode = INIT_ERROR_EXIT_CODE;
-1149      throw new TableNotFoundException(msg);
-1150    }
-1151  } else {
-1152    returnTables = monitorTargets;
-1153  }
-1154
-1155  return returnTables;
-1156}
-1157
-1158/*
-1159 * canary entry point to monitor all the tables.
-1160 */
-1161private List<Future<Void>> sniff(TaskType taskType, RegionStdOutSink regionSink) throws Exception {
-1162  if (LOG.isDebugEnabled()) {
-1163    LOG.debug(String.format("reading list of tables"));
-1164  }
-1165  List<Future<Void>> taskFutures = new LinkedList<>();
-1166  for (HTableDescriptor table : admin.listTables()) {
-1167    if (admin.isTableEnabled(table.getTableName())
-1168        && (!table.getTableName().equals(writeTableName))) {
-1169      AtomicLong readLatency = regionSink.initializeAndGetReadLatencyForTable(table.getNameAsString());
-1170      taskFutures.addAll(Canary.sniff(admin, sink, table, executor, taskType, this.rawScanEnabled, readLatency));
-1171    }
-1172  }
-1173  

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/17128d27/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
index feb42ea..4bd98f4 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
@@ -185,4189 +185,4266 @@
 177import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest;
 178import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest;
 179import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse;
-180import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest;
-181import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest;
-182import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse;
-183import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest;
-184import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
-185import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
-186import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse;
-187import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
-188import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-189import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-190import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-191import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-192import org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
-193import org.apache.hadoop.hbase.util.Addressing;
-194import org.apache.hadoop.hbase.util.Bytes;
-195import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-196import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-197import org.apache.hadoop.hbase.util.Pair;
-198import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
-199import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-200import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-201import org.apache.hadoop.ipc.RemoteException;
-202import org.apache.hadoop.util.StringUtils;
-203import org.apache.zookeeper.KeeperException;
-204
-205import com.google.common.annotations.VisibleForTesting;
-206import com.google.protobuf.Descriptors;
-207import com.google.protobuf.Message;
-208import com.google.protobuf.RpcController;
-209import java.util.stream.Collectors;
-210
-211/**
-212 * HBaseAdmin is no longer a client API. It is marked InterfaceAudience.Private indicating that
-213 * this is an HBase-internal class as defined in
-214 * https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/InterfaceClassification.html
-215 * There are no guarantees for backwards source / binary compatibility and methods or class can
-216 * change or go away without deprecation.
-217 * Use {@link Connection#getAdmin()} to obtain an instance of {@link Admin} instead of constructing
-218 * an HBaseAdmin directly.
-219 *
-220 * <p>Connection should be an <i>unmanaged</i> connection obtained via
-221 * {@link ConnectionFactory#createConnection(Configuration)}
-222 *
-223 * @see ConnectionFactory
-224 * @see Connection
-225 * @see Admin
-226 */
-227@InterfaceAudience.Private
-228@InterfaceStability.Evolving
-229public class HBaseAdmin implements Admin {
-230  private static final Log LOG = LogFactory.getLog(HBaseAdmin.class);
-231
-232  private static final String ZK_IDENTIFIER_PREFIX =  "hbase-admin-on-";
+180import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest;
+181import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse;
+182import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest;
+183import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest;
+184import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse;
+185import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest;
+186import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
+187import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
+188import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse;
+189import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
+190import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
+191import 

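The Javadoc above is explicit that callers must not construct HBaseAdmin themselves. The supported path, assuming a Configuration named conf is in scope:

    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin()) {
      // issue admin operations through the Admin interface only
    }
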
[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2777c693/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestStore.MyScannerHook.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestStore.MyScannerHook.html
 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestStore.MyScannerHook.html
deleted file mode 100644
index 372b21f..000
--- 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestStore.MyScannerHook.html
+++ /dev/null
@@ -1,212 +0,0 @@
-Uses of Interface org.apache.hadoop.hbase.regionserver.TestStore.MyScannerHook (Apache HBase 3.0.0-SNAPSHOT Test API)
-Packages that use TestStore.MyScannerHook:
-  org.apache.hadoop.hbase.regionserver
-
-Uses of TestStore.MyScannerHook in org.apache.hadoop.hbase.regionserver
-
-Fields declared as TestStore.MyScannerHook:
-  private TestStore.MyScannerHook  TestStore.MyStore.hook
-
-Methods with parameters of type TestStore.MyScannerHook:
-  private org.apache.hadoop.hbase.regionserver.Store
-      TestStore.init(String methodName, org.apache.hadoop.conf.Configuration conf,
-          org.apache.hadoop.hbase.HTableDescriptor htd, org.apache.hadoop.hbase.HColumnDescriptor hcd,
-          TestStore.MyScannerHook hook)
-  private org.apache.hadoop.hbase.regionserver.Store
-      TestStore.init(String methodName, org.apache.hadoop.conf.Configuration conf,
-          org.apache.hadoop.hbase.HTableDescriptor htd, org.apache.hadoop.hbase.HColumnDescriptor hcd,
-          TestStore.MyScannerHook hook, boolean switchToPread)
-  private TestStore.MyStore
-      TestStore.initMyStore(String methodName, org.apache.hadoop.conf.Configuration conf,
-          TestStore.MyScannerHook hook)
-
-Constructors with parameters of type TestStore.MyScannerHook:
-  MyStore(org.apache.hadoop.hbase.regionserver.HRegion region,
-      org.apache.hadoop.hbase.HColumnDescriptor family, org.apache.hadoop.conf.Configuration confParam,
-      TestStore.MyScannerHook hook, boolean switchToPread)
-Copyright 2007-2017 The Apache Software Foundation. All rights reserved.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2777c693/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestStore.MyStore.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestStore.MyStore.html
 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestStore.MyStore.html
index ef53af7..a266cd7 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestStore.MyStore.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/class-use/TestStore.MyStore.html
@@ -104,9 +104,9 @@
 
 
 private TestStore.MyStore
-TestStore.initMyStore(String methodName,
[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
index 75db22d..99a09f9 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableOperator.html
@@ -37,2710 +37,2816 @@
 029import java.util.List;
 030import java.util.Map;
 031import java.util.Optional;
-032import 
java.util.concurrent.CompletableFuture;
-033import java.util.concurrent.TimeUnit;
-034import 
java.util.concurrent.atomic.AtomicReference;
-035import java.util.function.BiConsumer;
-036import java.util.regex.Pattern;
-037import java.util.stream.Collectors;
-038
-039import 
com.google.common.annotations.VisibleForTesting;
-040
-041import io.netty.util.Timeout;
-042import io.netty.util.TimerTask;
-043
-044import java.util.stream.Stream;
-045
-046import org.apache.commons.io.IOUtils;
-047import org.apache.commons.logging.Log;
-048import 
org.apache.commons.logging.LogFactory;
-049import 
org.apache.hadoop.hbase.ClusterStatus;
-050import 
org.apache.hadoop.hbase.HRegionInfo;
-051import 
org.apache.hadoop.hbase.HRegionLocation;
-052import 
org.apache.hadoop.hbase.MetaTableAccessor;
-053import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-054import 
org.apache.hadoop.hbase.NotServingRegionException;
-055import 
org.apache.hadoop.hbase.ProcedureInfo;
-056import 
org.apache.hadoop.hbase.RegionLoad;
-057import 
org.apache.hadoop.hbase.RegionLocations;
-058import 
org.apache.hadoop.hbase.ServerName;
-059import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-060import 
org.apache.hadoop.hbase.HConstants;
-061import 
org.apache.hadoop.hbase.TableExistsException;
-062import 
org.apache.hadoop.hbase.TableName;
-063import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-064import 
org.apache.hadoop.hbase.TableNotDisabledException;
-065import 
org.apache.hadoop.hbase.TableNotEnabledException;
-066import 
org.apache.hadoop.hbase.TableNotFoundException;
-067import 
org.apache.hadoop.hbase.UnknownRegionException;
-068import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-069import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-070import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-071import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-072import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-073import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-074import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-075import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-076import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-077import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-078import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-079import 
org.apache.hadoop.hbase.replication.ReplicationException;
-080import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-081import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-082import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-099import 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
index 71844ce..75db22d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
@@ -105,2564 +105,2642 @@
 097import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 098import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
 099import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-100import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-101import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-102import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-103import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-104import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-105import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-106import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-107import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-108import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-109import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-110import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-111import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-112import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-113import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-114import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-115import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-116import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-117import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-118import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-119import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-120import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-122import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-123import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-124import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-125import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-126import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-127import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-128import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-129import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-130import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-131import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-132import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-133import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-134import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-135import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-136import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-137import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-138import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-139import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-140import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
-141import 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2d27954a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
index 16c0042..71844ce 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
@@ -126,2499 +126,2543 @@
 118import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
 119import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
 120import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-122import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-123import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-124import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-125import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-126import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-127import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-128import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-129import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-130import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-131import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-132import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-133import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-134import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-135import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-136import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-137import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-138import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
-139import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;
-140import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse;
-141import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-142import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-143import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
-144import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
-145import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;
-146import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;
-147import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest;
-148import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse;
-149import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest;
-150import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse;
-151import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest;
-152import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse;
-153import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest;
-154import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse;
-155import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest;
-156import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse;
-157import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
-158import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
-159import 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
index 8278dde..da29251 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":18,"i10":6,"i11":18,"i12":6,"i13":6,"i14":6,"i15":18,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":18,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":18,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":18,"i43":6,"i44":6,"i45":6,"i46":18,"i47":6,"i48":18,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":18,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":18,"i64":6,"i65":18,"i66":6,"i67":18,"i68":6,"i69":18,"i70":6,"i71":6,"i72":18,"i73":6,"i74":18,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":18,"i90":18,"i91":6,"i92":6,"i93":18,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6};
+var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":18,"i10":6,"i11":18,"i12":6,"i13":6,"i14":6,"i15":18,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":18,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":18,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":18,"i43":6,"i44":6,"i45":6,"i46":18,"i47":6,"i48":18,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":18,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":18,"i67":6,"i68":18,"i69":6,"i70":18,"i71":6,"i72":18,"i73":6,"i74":6,"i75":18,"i76":6,"i77":18,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":18,"i99":18,"i100":6,"i101":6,"i102":18,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -460,11 +460,29 @@ public interface
 CompletableFuture<Boolean>
+isCatalogJanitorOn()
+Query on the catalog janitor state.
+
+CompletableFuture<Boolean>
+isCleanerChoreOn()
+Query the current state of the cleaner chore.
+
+CompletableFuture<Boolean>
 isMasterInMaintenanceMode()
 Check whether master is in maintenance mode
 
+CompletableFuture<Boolean>
+isNormalizerOn()
+Query the current state of the region normalizer
+
 CompletableFuture<Boolean>
 isProcedureFinished(String signature,
   String instance,
@@ -472,17 +490,17 @@ public interface
 Check the current state of the specified procedure.
 
 CompletableFuture<Boolean>
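
All of the switch queries added above return CompletableFuture<Boolean>, so they can be fired concurrently and then combined. A minimal sketch, assuming an AsyncConnection is already open (the AdminStateCheck class itself is hypothetical):

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.AsyncConnection;

class AdminStateCheck {
  // Fires the three state queries in parallel and logs the results once all
  // have completed; join() cannot block here because allOf guarantees
  // every future is done before thenRun executes.
  static CompletableFuture<Void> logSwitches(AsyncConnection conn) {
    AsyncAdmin admin = conn.getAdmin();
    CompletableFuture<Boolean> janitor = admin.isCatalogJanitorOn();
    CompletableFuture<Boolean> cleaner = admin.isCleanerChoreOn();
    CompletableFuture<Boolean> normalizer = admin.isNormalizerOn();
    return CompletableFuture.allOf(janitor, cleaner, normalizer)
        .thenRun(() -> System.out.printf("janitor=%s cleaner=%s normalizer=%s%n",
            janitor.join(), cleaner.join(), normalizer.join()));
  }
}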

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b3b50f22/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
index d262744..7464ef7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
@@ -894,2603 +894,2617 @@
 886    // Set master as 'initialized'.
 887    setInitialized(true);
 888
-889    status.setStatus("Assign meta replicas");
-890    metaBootstrap.assignMetaReplicas();
-891
-892    status.setStatus("Starting quota manager");
-893    initQuotaManager();
-894    if (QuotaUtil.isQuotaEnabled(conf)) {
-895      // Create the quota snapshot notifier
-896      spaceQuotaSnapshotNotifier = createQuotaSnapshotNotifier();
-897      spaceQuotaSnapshotNotifier.initialize(getClusterConnection());
-898      this.quotaObserverChore = new QuotaObserverChore(this, getMasterMetrics());
-899      // Start the chore to read the region FS space reports and act on them
-900      getChoreService().scheduleChore(quotaObserverChore);
-901
-902      this.snapshotQuotaChore = new SnapshotQuotaObserverChore(this, getMasterMetrics());
-903      // Start the chore to read snapshots and add their usage to table/NS quotas
-904      getChoreService().scheduleChore(snapshotQuotaChore);
-905    }
-906
-907    // clear the dead servers with same host name and port of online server because we are not
-908    // removing dead server with same hostname and port of rs which is trying to check in before
-909    // master initialization. See HBASE-5916.
-910    this.serverManager.clearDeadServersWithSameHostNameAndPortOfOnlineServer();
-911
-912    // Check and set the znode ACLs if needed in case we are overtaking a non-secure configuration
-913    status.setStatus("Checking ZNode ACLs");
-914    zooKeeper.checkAndSetZNodeAcls();
-915
-916    status.setStatus("Initializing MOB Cleaner");
-917    initMobCleaner();
-918
-919    status.setStatus("Calling postStartMaster coprocessors");
-920    if (this.cpHost != null) {
-921      // don't let cp initialization errors kill the master
-922      try {
-923        this.cpHost.postStartMaster();
-924      } catch (IOException ioe) {
-925        LOG.error("Coprocessor postStartMaster() hook failed", ioe);
-926      }
-927    }
-928
-929    zombieDetector.interrupt();
-930  }
-931
-932  /**
-933   * Adds the {@code MasterSpaceQuotaObserver} to the list of configured Master observers to
-934   * automatically remove space quotas for a table when that table is deleted.
-935   */
-936  @VisibleForTesting
-937  public void updateConfigurationForSpaceQuotaObserver(Configuration conf) {
-938    // We're configured to not delete quotas on table deletion, so we don't need to add the obs.
-939    if (!conf.getBoolean(
-940          MasterSpaceQuotaObserver.REMOVE_QUOTA_ON_TABLE_DELETE,
-941          MasterSpaceQuotaObserver.REMOVE_QUOTA_ON_TABLE_DELETE_DEFAULT)) {
-942      return;
-943    }
-944    String[] masterCoprocs = conf.getStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY);
-945    final int length = null == masterCoprocs ? 0 : masterCoprocs.length;
-946    String[] updatedCoprocs = new String[length + 1];
-947    if (length > 0) {
-948      System.arraycopy(masterCoprocs, 0, updatedCoprocs, 0, masterCoprocs.length);
-949    }
-950    updatedCoprocs[length] = MasterSpaceQuotaObserver.class.getName();
-951    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, updatedCoprocs);
-952  }
-953
-954  private void initMobCleaner() {
-955    this.expiredMobFileCleanerChore = new ExpiredMobFileCleanerChore(this);
-956    getChoreService().scheduleChore(expiredMobFileCleanerChore);
-957
-958    int mobCompactionPeriod = conf.getInt(MobConstants.MOB_COMPACTION_CHORE_PERIOD,
-959        MobConstants.DEFAULT_MOB_COMPACTION_CHORE_PERIOD);
-960    if (mobCompactionPeriod > 0) {
-961      this.mobCompactChore = new MobCompactionChore(this, mobCompactionPeriod);
-962      getChoreService().scheduleChore(mobCompactChore);
-963    } else {
-964      LOG.info("The period is " + mobCompactionPeriod + " seconds, MobCompactionChore is disabled");
-965    }
-966    this.mobCompactThread = new MasterMobCompactionThread(this);
-967  }
-968
-969  /**
-970   * Create a {@link MasterMetaBootstrap} instance.
-971   */
-972  MasterMetaBootstrap createMetaBootstrap(final HMaster master, final MonitoredTask status) {
-973    // We put this out here in a method so can do a Mockito.spy and stub it out
-974    // w/ a mocked up MasterMetaBootstrap.
-975    return new MasterMetaBootstrap(master, status);
-976  }
-977
-978  /**
-979   * Create a {@link ServerManager} instance.
-980   */
-981  ServerManager createServerManager(final 
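
Every chore in this listing follows the same pattern: subclass ScheduledChore, override chore(), and hand the instance to the master's ChoreService. A minimal sketch of that pattern, assuming only the ScheduledChore/ChoreService/Stoppable types that appear in this diff (the ReportChore class is hypothetical):

import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;

class ReportChore extends ScheduledChore {
  ReportChore(Stoppable stopper, int periodMillis) {
    super("ReportChore", stopper, periodMillis);
  }

  @Override
  protected void chore() {
    // Periodic work goes here, e.g. reading FS space reports the way
    // QuotaObserverChore does above.
    System.out.println("chore fired");
  }
}

Scheduling then mirrors the getChoreService().scheduleChore(...) calls in HMaster, where stopper is whatever Stoppable owns the chore:

// new ChoreService("master").scheduleChore(new ReportChore(stopper, 5 * 60 * 1000));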

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca9f6925/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncHBaseAdmin.Converter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncHBaseAdmin.Converter.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncHBaseAdmin.Converter.html
new file mode 100644
index 000..ede810d
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncHBaseAdmin.Converter.html
@@ -0,0 +1,186 @@
+Uses of Interface org.apache.hadoop.hbase.client.RawAsyncHBaseAdmin.Converter (Apache HBase 3.0.0-SNAPSHOT API)
+Packages that use RawAsyncHBaseAdmin.Converter
+
+Package / Description
+org.apache.hadoop.hbase.client
+Provides HBase Client
+Uses of RawAsyncHBaseAdmin.Converter in org.apache.hadoop.hbase.client
+
+Methods in org.apache.hadoop.hbase.client with parameters of type RawAsyncHBaseAdmin.Converter
+
+Modifier and Type / Method and Description
+
+private <PREQ,PRESP,RESP> CompletableFuture<RESP>
+RawAsyncHBaseAdmin.adminCall(HBaseRpcController controller,
+ org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.Interface stub,
+ PREQ preq,
+ RawAsyncHBaseAdmin.AdminRpcCall<PRESP,PREQ> rpcCall,
+ RawAsyncHBaseAdmin.Converter<RESP,PRESP> respConverter)
+
+private <PREQ,PRESP,RESP> CompletableFuture<RESP>
+RawAsyncHBaseAdmin.call(HBaseRpcController controller,
+ org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.Interface stub,
+ PREQ preq,
+ RawAsyncHBaseAdmin.MasterRpcCall<PRESP,PREQ> rpcCall,
+ RawAsyncHBaseAdmin.Converter<RESP,PRESP> respConverter)
+
+private <PREQ,PRESP> CompletableFuture<Void>
+RawAsyncHBaseAdmin.procedureCall(PREQ preq,
+ RawAsyncHBaseAdmin.MasterRpcCall<PRESP,PREQ> rpcCall,
+ RawAsyncHBaseAdmin.Converter<Long,PRESP> respConverter,
+ RawAsyncHBaseAdmin.ProcedureBiConsumer consumer)
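
The private adminCall/call/procedureCall helpers above all share one idea: pair a callback-style protobuf RPC with a Converter that maps the raw response onto the caller-facing type, surfacing the result as a CompletableFuture. A stripped-down, self-contained sketch of that bridging idea; the names here are hypothetical stand-ins, not the shaded HBase stubs:

import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
import java.util.function.Function;

final class RpcFutures {
  // A callback-style RPC: the transport invokes exactly one of the callbacks.
  interface RpcCall<PRESP> {
    void call(Consumer<PRESP> done, Consumer<Throwable> error);
  }

  // Bridges the callback into a CompletableFuture, applying respConverter
  // (the Converter role above) to the protobuf response.
  static <PRESP, RESP> CompletableFuture<RESP> call(
      RpcCall<PRESP> rpcCall, Function<PRESP, RESP> respConverter) {
    CompletableFuture<RESP> future = new CompletableFuture<>();
    rpcCall.call(resp -> {
      try {
        future.complete(respConverter.apply(resp));
      } catch (Throwable t) {
        future.completeExceptionally(t); // the converter itself may fail
      }
    }, future::completeExceptionally);
    return future;
  }
}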

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca9f6925/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
new file mode 100644
index 000..08ebaa3
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
@@ -0,0 +1,125 @@
+Uses of Class org.apache.hadoop.hbase.client.RawAsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer (Apache HBase 3.0.0-SNAPSHOT API)
[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aecb1286/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
index 01496d6..dc12c09 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
@@ -48,2406 +48,2267 @@
 040
 041import io.netty.util.Timeout;
 042import io.netty.util.TimerTask;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import org.apache.commons.logging.Log;
-046import org.apache.commons.logging.LogFactory;
-047import org.apache.hadoop.hbase.HRegionInfo;
-048import org.apache.hadoop.hbase.HRegionLocation;
-049import org.apache.hadoop.hbase.MetaTableAccessor;
-050import org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-051import org.apache.hadoop.hbase.NotServingRegionException;
-052import org.apache.hadoop.hbase.ProcedureInfo;
-053import org.apache.hadoop.hbase.RegionLocations;
-054import org.apache.hadoop.hbase.ServerName;
-055import org.apache.hadoop.hbase.NamespaceDescriptor;
-056import org.apache.hadoop.hbase.HConstants;
-057import org.apache.hadoop.hbase.TableExistsException;
-058import org.apache.hadoop.hbase.TableName;
-059import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-060import org.apache.hadoop.hbase.TableNotDisabledException;
-061import org.apache.hadoop.hbase.TableNotEnabledException;
-062import org.apache.hadoop.hbase.TableNotFoundException;
-063import org.apache.hadoop.hbase.UnknownRegionException;
-064import org.apache.hadoop.hbase.classification.InterfaceAudience;
-065import org.apache.hadoop.hbase.classification.InterfaceStability;
-066import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-067import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-068import org.apache.hadoop.hbase.client.Scan.ReadType;
-069import org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-070import org.apache.hadoop.hbase.client.replication.TableCFs;
-071import org.apache.hadoop.hbase.exceptions.DeserializationException;
-072import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-073import org.apache.hadoop.hbase.quotas.QuotaFilter;
-074import org.apache.hadoop.hbase.quotas.QuotaSettings;
-075import org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-076import org.apache.hadoop.hbase.replication.ReplicationException;
-077import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-078import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-079import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-080import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-081import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-082import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-083import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-084import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-085import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-086import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-087import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-088import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-089import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-090import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-091import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-092import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-093import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-094import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-095import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-096import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-097import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-098import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-099import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-100import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-101import 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a719cd00/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ReadRequestCostFunction.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ReadRequestCostFunction.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ReadRequestCostFunction.html
index 6de986f..c895448 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ReadRequestCostFunction.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ReadRequestCostFunction.html
@@ -26,1592 +26,1693 @@
 018package org.apache.hadoop.hbase.master.balancer;
 019
 020import java.util.ArrayDeque;
-021import java.util.Arrays;
-022import java.util.Collection;
-023import java.util.Deque;
-024import java.util.HashMap;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Map;
-028import java.util.Map.Entry;
-029import java.util.Random;
-030
-031import org.apache.commons.logging.Log;
-032import org.apache.commons.logging.LogFactory;
-033import org.apache.hadoop.conf.Configuration;
-034import org.apache.hadoop.hbase.ClusterStatus;
-035import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-036import org.apache.hadoop.hbase.HConstants;
-037import org.apache.hadoop.hbase.HRegionInfo;
-038import org.apache.hadoop.hbase.RegionLoad;
-039import org.apache.hadoop.hbase.ServerLoad;
-040import org.apache.hadoop.hbase.ServerName;
-041import org.apache.hadoop.hbase.TableName;
-042import org.apache.hadoop.hbase.classification.InterfaceAudience;
-043import org.apache.hadoop.hbase.master.MasterServices;
-044import org.apache.hadoop.hbase.master.RegionPlan;
-045import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action;
-046import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
-047import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.AssignRegionAction;
-048import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.MoveRegionAction;
-049import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.SwapRegionsAction;
-050import org.apache.hadoop.hbase.util.Bytes;
-051import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-052
-053import com.google.common.collect.Lists;
-054
-055/**
-056 * This is a best effort load balancer. Given a cost function F(C) => x, it will
-057 * randomly try and mutate the cluster to C'. If F(C') < F(C) then the
-058 * new cluster state becomes the plan. It includes cost functions to compute the cost of:
-059 *   - Region Load
-060 *   - Table Load
-061 *   - Data Locality
-062 *   - Memstore Sizes
-063 *   - Storefile Sizes
-064 *
-065 *
-066 * Every cost function returns a number between 0 and 1 inclusive; where 0 is the lowest cost
-067 * best solution, and 1 is the highest possible cost and the worst solution.  The computed costs are
-068 * scaled by their respective multipliers:
+021import java.util.ArrayList;
+022import java.util.Arrays;
+023import java.util.Collection;
+024import java.util.Collections;
+025import java.util.Deque;
+026import java.util.HashMap;
+027import java.util.LinkedList;
+028import java.util.List;
+029import java.util.Map;
+030import java.util.Map.Entry;
+031import java.util.Random;
+032
+033import org.apache.commons.logging.Log;
+034import org.apache.commons.logging.LogFactory;
+035import org.apache.hadoop.conf.Configuration;
+036import org.apache.hadoop.hbase.ClusterStatus;
+037import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+038import org.apache.hadoop.hbase.HConstants;
+039import org.apache.hadoop.hbase.HRegionInfo;
+040import org.apache.hadoop.hbase.RegionLoad;
+041import org.apache.hadoop.hbase.ServerLoad;
+042import org.apache.hadoop.hbase.ServerName;
+043import org.apache.hadoop.hbase.TableName;
+044import org.apache.hadoop.hbase.classification.InterfaceAudience;
+045import org.apache.hadoop.hbase.master.MasterServices;
+046import org.apache.hadoop.hbase.master.RegionPlan;
+047import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action;
+048import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
+049import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.AssignRegionAction;
+050import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.LocalityType;
+051import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.MoveRegionAction;
+052import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.SwapRegionsAction;
+053import org.apache.hadoop.hbase.util.Bytes;
+054import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+055
+056import com.google.common.base.Optional;
+057import 
[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/476c54ed/devapidocs/src-html/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.html b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.html
new file mode 100644
index 000..b7ca7d9
--- /dev/null
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.html
@@ -0,0 +1,615 @@
+Source code
+001/*
+002 * Licensed to the Apache Software Foundation (ASF) under one or more
+003 * contributor license agreements.  See the NOTICE file distributed with
+004 * this work for additional information regarding copyright ownership.
+005 * The ASF licenses this file to you under the Apache License, Version 2.0
+006 * (the "License"); you may not use this file except in compliance with
+007 * the License.  You may obtain a copy of the License at
+008 *
+009 *     http://www.apache.org/licenses/LICENSE-2.0
+010 *
+011 * Unless required by applicable law or agreed to in writing, software
+012 * distributed under the License is distributed on an "AS IS" BASIS,
+013 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+014 * See the License for the specific language governing permissions and
+015 * limitations under the License.
+016 */
+017package org.apache.hadoop.hbase.quotas;
+018
+019import java.io.IOException;
+020import java.util.ArrayList;
+021import java.util.Arrays;
+022import java.util.Collection;
+023import java.util.Collections;
+024import java.util.HashSet;
+025import java.util.List;
+026import java.util.Map;
+027import java.util.Map.Entry;
+028import java.util.Objects;
+029import java.util.Set;
+030import java.util.concurrent.TimeUnit;
+031import java.util.function.Predicate;
+032import java.util.stream.Collectors;
+033
+034import org.apache.commons.lang.builder.HashCodeBuilder;
+035import org.apache.commons.logging.Log;
+036import org.apache.commons.logging.LogFactory;
+037import org.apache.hadoop.conf.Configuration;
+038import org.apache.hadoop.fs.FileStatus;
+039import org.apache.hadoop.fs.FileSystem;
+040import org.apache.hadoop.fs.Path;
+041import org.apache.hadoop.hbase.HRegionInfo;
+042import org.apache.hadoop.hbase.ScheduledChore;
+043import org.apache.hadoop.hbase.Stoppable;
+044import org.apache.hadoop.hbase.TableName;
+045import org.apache.hadoop.hbase.classification.InterfaceAudience;
+046import org.apache.hadoop.hbase.client.Admin;
+047import org.apache.hadoop.hbase.client.Connection;
+048import org.apache.hadoop.hbase.client.Table;
+049import org.apache.hadoop.hbase.master.HMaster;
+050import org.apache.hadoop.hbase.master.MetricsMaster;
+051import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
+052import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
+053import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles;
+054import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile;
+055import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
+056import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
+057import org.apache.hadoop.hbase.util.FSUtils;
+058import org.apache.hadoop.hbase.util.HFileArchiveUtil;
+059import org.apache.hadoop.util.StringUtils;
+060
+061import com.google.common.collect.HashMultimap;
+062import com.google.common.collect.Multimap;
+063
+064/**
+065 * A Master-invoked {@code Chore} that computes the size of each snapshot which was created from
+066 * a table which has a space quota.
+067 */
+068@InterfaceAudience.Private
+069public class SnapshotQuotaObserverChore extends ScheduledChore {
+070  private static final Log LOG = LogFactory.getLog(SnapshotQuotaObserverChore.class);
+071  static final String SNAPSHOT_QUOTA_CHORE_PERIOD_KEY =
+072      "hbase.master.quotas.snapshot.chore.period";
+073  static final int SNAPSHOT_QUOTA_CHORE_PERIOD_DEFAULT = 1000 * 60 * 5; // 5 minutes in millis
+074
+075  static final String SNAPSHOT_QUOTA_CHORE_DELAY_KEY =
+076      "hbase.master.quotas.snapshot.chore.delay";
+077  static final long SNAPSHOT_QUOTA_CHORE_DELAY_DEFAULT = 1000L * 60L; // 1 minute in millis
+078
+079  static final String SNAPSHOT_QUOTA_CHORE_TIMEUNIT_KEY =
+080      "hbase.master.quotas.snapshot.chore.timeunit";
+081  static final String SNAPSHOT_QUOTA_CHORE_TIMEUNIT_DEFAULT = TimeUnit.MILLISECONDS.name();
+082
+083  private final Connection conn;
+084  private final Configuration conf;
+085  private final MetricsMaster metrics;
+086  private final FileSystem fs;
+087
+088  public SnapshotQuotaObserverChore(HMaster master, MetricsMaster metrics) {
+089    this(
+090
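
The chore's cadence comes entirely from the three keys just defined. A minimal sketch of overriding them programmatically; the same keys can equally be set in hbase-site.xml, and the values here are purely illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

class SnapshotQuotaChoreConfig {
  static Configuration tuned() {
    Configuration conf = HBaseConfiguration.create();
    // Compute snapshot sizes every 10 minutes instead of the 5-minute default.
    conf.setInt("hbase.master.quotas.snapshot.chore.period", 10 * 60 * 1000);
    // Wait 2 minutes after startup before the first run (default is 1 minute).
    conf.setLong("hbase.master.quotas.snapshot.chore.delay", 2 * 60 * 1000L);
    // Unit in which the period and delay above are interpreted.
    conf.set("hbase.master.quotas.snapshot.chore.timeunit", "MILLISECONDS");
    return conf;
  }
}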

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/77a552c4/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
index fbfd9d2..787145a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
@@ -18,6 +18,12 @@
 catch(err) {
 }
 //-->
+var methods = {"i0":42,"i1":42};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
 
 
 JavaScript is disabled on your browser.
@@ -124,27 +130,6 @@ extends
-Nested Class Summary
-
-Nested Classes
-
-Modifier and Type / Class and Description
-
-private static class
-ImmutableHTableDescriptor.UnmodifyableTableDescriptor
-Deprecated.
 
 
 
@@ -156,7 +141,14 @@ extends
 
 Fields inherited from class org.apache.hadoop.hbase.HTableDescriptor
-COMPACTION_ENABLED, DEFAULT_COMPACTION_ENABLED, DEFAULT_MEMSTORE_FLUSH_SIZE, DEFAULT_NORMALIZATION_ENABLED, DEFAULT_READONLY, DEFAULT_REGION_MEMSTORE_REPLICATION, DEFAULT_REGION_REPLICATION, DURABILITY, FLUSH_POLICY, IS_META, IS_ROOT, MAX_FILESIZE, MEMSTORE_FLUSHSIZE, NAMESPACE_COL_DESC_BYTES, NAMESPACE_FAMILY_INFO, NAMESPACE_FAMILY_INFO_BYTES, NAMESPACE_TABLEDESC, NORMALIZATION_ENABLED, OWNER, OWNER_KEY, PRIORITY, READONLY, REGION_MEMSTORE_REPLICATION, REGION_REPLICATION, SPLIT_POLICY
+COMPACTION_ENABLED, DEFAULT_COMPACTION_ENABLED, DEFAULT_MEMSTORE_FLUSH_SIZE, DEFAULT_NORMALIZATION_ENABLED, DEFAULT_READONLY, DEFAULT_REGION_MEMSTORE_REPLICATION, DEFAULT_REGION_REPLICATION, delegatee, DURABILITY, FLUSH_POLICY, IS_META, IS_ROOT, MAX_FILESIZE, MEMSTORE_FLUSHSIZE, NAMESPACE_COL_DESC_BYTES, NAMESPACE_FAMILY_INFO, NAMESPACE_FAMILY_INFO_BYTES, NAMESPACE_TABLEDESC, NORMALIZATION_ENABLED, OWNER, OWNER_KEY, PRIORITY, READONLY, REGION_MEMSTORE_REPLICATION, REGION_REPLICATION, SPLIT_POLICY
+
+Fields inherited from interface org.apache.hadoop.hbase.client.TableDescriptor
+COMPARATOR
 
 
@@ -185,12 +177,32 @@ extends 
 
 Method Summary
+
+All Methods | Instance Methods | Concrete Methods | Deprecated Methods
+
+Modifier and Type / Method and Description
+
+protected TableDescriptorBuilder.ModifyableTableDescriptor
+getDelegateeForModification()
+Deprecated.
+
+protected HColumnDescriptor
+toHColumnDescriptor(ColumnFamilyDescriptor desc)
+Deprecated.
+Returns an HColumnDescriptor so users can keep compatibility as much as possible.
 
 
 
 
 Methods inherited from class org.apache.hadoop.hbase.HTableDescriptor
-addCoprocessor, addCoprocessor, addCoprocessorWithSpec, addFamily, compareTo, equals, getColumnFamilies, getColumnFamilyCount, getConfiguration, getConfigurationValue, getCoprocessors, getDurability, getFamilies, getFamiliesKeys, getFamily, getFlushPolicyClassName, getMaxFileSize, getMemStoreFlushSize, getNameAsString, getOwnerString, getPriority, getRegionReplication, getRegionSplitPolicyClassName, getTableName, getValue, getValue, getValues, hasCoprocessor, hasFamily, hashCode, hasRegionMemstoreReplication, hasSerialReplicationScope, isCompactionEnabled, isMetaRegion, isMetaTable, isNormalizationEnabled, isReadOnly, isRootRegion, modifyFamily, parseFrom, remove, remove, remove, removeConfiguration, removeCoprocessor, removeFamily, setCompactionEnabled, setConfiguration, setDurability, setFlushPolicyClassName, setMaxFileSize, setMemStoreFlushSize, setNormalizationEnabled, setOwner, setOwnerString, setPriority, setReadOnly, setRegionMemstoreReplication, setRegionReplication, setRegionSplitPolicyClassName, setValue, setValue, setValue, toByteArray, toString, toStringCustomizedValues, toStringTableAttributes
+addCoprocessor, addCoprocessor, addCoprocessorWithSpec, addFamily, compareTo, equals, getColumnFamilies, getColumnFamily, getColumnFamilyCount, getColumnFamilyNames, getConfiguration, getConfigurationValue, getCoprocessors, getDurability, getFamilies, getFamiliesKeys, getFamily, getFlushPolicyClassName, getMaxFileSize, getMemStoreFlushSize, getNameAsString, getOwnerString, getPriority, getRegionReplication, getRegionSplitPolicyClassName, getTableName, getValue, getValue, getValue, getValues, hasColumnFamily, hasCoprocessor, hasFamily, hashCode, hasRegionMemstoreReplication,
[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b44796ef/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
index 8342000..f4eee5a 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
@@ -4,7 +4,7 @@
 
 
 
-BackupSystemTable (Apache HBase 2.0.0-SNAPSHOT API)
+BackupSystemTable (Apache HBase 3.0.0-SNAPSHOT API)
 
 
 
@@ -12,13 +12,13 @@
 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
index cc0de48..30a49d0 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
@@ -315,7 +315,7 @@ extends Object
 
 (package private) void
-commitStoreFiles(Map<byte[],List<StoreFile>> storeFiles)
+commitStoreFiles(Map<byte[],List<StoreFile>> storeFiles)
 Moves multiple store files to the relative region's family store directory.
 
@@ -527,7 +527,7 @@ extends Object
 org.apache.hadoop.fs.Path
 mergeStoreFile(HRegionInfo mergedRegion,
   String familyName,
-  StoreFile f,
+  StoreFile f,
   org.apache.hadoop.fs.Path mergedDir)
 Write out a merge reference under the given merges directory.
 
@@ -568,7 +568,7 @@ extends Object
 
 void
 removeStoreFiles(String familyName,
-  Collection<StoreFile> storeFiles)
+  Collection<StoreFile> storeFiles)
 Closes and archives the specified store files from the specified family.
 
@@ -606,7 +606,7 @@ extends Object
 org.apache.hadoop.fs.Path
 splitStoreFile(HRegionInfo hri,
   String familyName,
-  StoreFile f,
+  StoreFile f,
   byte[] splitRow,
   boolean top,
   RegionSplitPolicy splitPolicy)
@@ -1326,7 +1326,7 @@ public String
 
 commitStoreFiles
-void commitStoreFiles(Map<byte[],List<StoreFile>> storeFiles)
+void commitStoreFiles(Map<byte[],List<StoreFile>> storeFiles)
     throws IOException
 Moves multiple store files to the relative region's family store directory.
 
@@ -1363,7 +1363,7 @@ public String
 
 removeStoreFiles
 public void removeStoreFiles(String familyName,
-    Collection<StoreFile> storeFiles)
+    Collection<StoreFile> storeFiles)
   throws IOException
 Closes and archives the specified store files from the specified family.
 
@@ -1512,7 +1512,7 @@ public String
 splitStoreFile
 public org.apache.hadoop.fs.Path splitStoreFile(HRegionInfo hri,

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c9d35424/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
index 6414009..2ad3a12 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
@@ -25,42 +25,42 @@
 017 */
 018package org.apache.hadoop.hbase.io.hfile;
 019
-020import com.google.common.annotations.VisibleForTesting;
-021import com.google.common.base.Preconditions;
-022
-023import java.io.DataInputStream;
-024import java.io.DataOutput;
-025import java.io.DataOutputStream;
-026import java.io.IOException;
-027import java.io.InputStream;
-028import java.nio.ByteBuffer;
-029import java.util.concurrent.atomic.AtomicReference;
-030
-031import org.apache.commons.logging.Log;
-032import org.apache.commons.logging.LogFactory;
-033import org.apache.hadoop.fs.FSDataInputStream;
-034import org.apache.hadoop.fs.FSDataOutputStream;
-035import org.apache.hadoop.fs.Path;
-036import org.apache.hadoop.hbase.Cell;
-037import org.apache.hadoop.hbase.HConstants;
-038import org.apache.hadoop.hbase.classification.InterfaceAudience;
-039import org.apache.hadoop.hbase.fs.HFileSystem;
-040import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
-041import org.apache.hadoop.hbase.io.ByteBuffInputStream;
-042import org.apache.hadoop.hbase.io.ByteBufferWriterDataOutputStream;
-043import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-044import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-045import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
-046import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
-047import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
-048import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
-049import org.apache.hadoop.hbase.nio.ByteBuff;
-050import org.apache.hadoop.hbase.nio.MultiByteBuff;
-051import org.apache.hadoop.hbase.nio.SingleByteBuff;
-052import org.apache.hadoop.hbase.util.Bytes;
-053import org.apache.hadoop.hbase.util.ChecksumType;
-054import org.apache.hadoop.hbase.util.ClassSize;
-055import org.apache.hadoop.io.IOUtils;
+020import java.io.DataInputStream;
+021import java.io.DataOutput;
+022import java.io.DataOutputStream;
+023import java.io.IOException;
+024import java.io.InputStream;
+025import java.nio.ByteBuffer;
+026import java.util.concurrent.atomic.AtomicReference;
+027
+028import org.apache.commons.logging.Log;
+029import org.apache.commons.logging.LogFactory;
+030import org.apache.hadoop.fs.FSDataInputStream;
+031import org.apache.hadoop.fs.FSDataOutputStream;
+032import org.apache.hadoop.fs.Path;
+033import org.apache.hadoop.hbase.Cell;
+034import org.apache.hadoop.hbase.HConstants;
+035import org.apache.hadoop.hbase.classification.InterfaceAudience;
+036import org.apache.hadoop.hbase.fs.HFileSystem;
+037import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
+038import org.apache.hadoop.hbase.io.ByteBuffInputStream;
+039import org.apache.hadoop.hbase.io.ByteBufferWriterDataOutputStream;
+040import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
+041import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+042import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
+043import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
+044import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
+045import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
+046import org.apache.hadoop.hbase.nio.ByteBuff;
+047import org.apache.hadoop.hbase.nio.MultiByteBuff;
+048import org.apache.hadoop.hbase.nio.SingleByteBuff;
+049import org.apache.hadoop.hbase.util.Bytes;
+050import org.apache.hadoop.hbase.util.ChecksumType;
+051import org.apache.hadoop.hbase.util.ClassSize;
+052import org.apache.hadoop.io.IOUtils;
+053
+054import com.google.common.annotations.VisibleForTesting;
+055import com.google.common.base.Preconditions;
 056
 057/**
 058 * Reads {@link HFile} version 2 blocks to HFiles and via {@link Cacheable} Interface to caches.
@@ -443,1645 +443,1656 @@
 435    return nextBlockOnDiskSize;
 436  }
 437
-438  public BlockType getBlockType() {
-439    return blockType;
-440  }
-441
-442  /** @return get data block encoding id that was used to encode this block */
-443  public short getDataBlockEncodingId() {
-444    if (blockType != BlockType.ENCODED_DATA) {
-445      throw new IllegalArgumentException("Querying encoder ID of a block " +
-446          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);
-447    }
-448    return 
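
As the removed lines show, getDataBlockEncodingId() refuses to answer for anything but ENCODED_DATA blocks. A hedged sketch of the guard a caller would use; HFileBlock and BlockType are the types from this listing, while the helper class is hypothetical:

import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.HFileBlock;

final class EncodingIdProbe {
  // Returns the encoder id for data blocks, or -1 for any other block type,
  // sidestepping the IllegalArgumentException thrown above.
  static short encodingIdOrDefault(HFileBlock block) {
    return block.getBlockType() == BlockType.ENCODED_DATA
        ? block.getDataBlockEncodingId()
        : (short) -1;
  }
}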

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6dd31117/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
index 2720e7e..fa6 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6};
+var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public interface MasterServices
+public interface MasterServices
 extends Server
 Services Master supplies
 
@@ -207,18 +207,28 @@ extends 
+long
+dispatchMergingRegions(HRegionInfo region_a,
+  HRegionInfo region_b,
+  boolean forcible,
+  long nonceGroup,
+  long nonce)
+Merge two regions.
+
+
+
 void
 drainRegionServer(ServerName server)
 Mark a region server as draining to prevent additional 
regions from getting assigned to it.
 
 
-
+
 void
 enableReplicationPeer(String peerId)
 Restart the replication stream to the specified peer
 
 
-
+
 long
 enableTable(TableName tableName,
 long nonceGroup,
@@ -226,149 +236,157 @@ extends Enable an existing table
 
 
-
-AssignmentManager
+
+AssignmentManager
 getAssignmentManager()
 
-
+
 CatalogJanitor
 getCatalogJanitor()
 
-
+
 ClusterSchema
 getClusterSchema()
 
-
+
 ExecutorService
 getExecutorService()
 
-
+
 FavoredNodesManager
 getFavoredNodesManager()
 
-
+
+ProcedureEvent
+getInitializedEvent()
+
+
 long
 getLastMajorCompactionTimestamp(TableName table)
 
-
+
 long
 getLastMajorCompactionTimestampForRegion(byte[] regionName)
 
-
+
 LoadBalancer
 getLoadBalancer()
 
-
+
 LockManager
 getLockManager()
 
-
+
 MasterCoprocessorHost
 getMasterCoprocessorHost()
 
-
+
 MasterFileSystem
 getMasterFileSystem()
 
-
+
 ProcedureExecutor<MasterProcedureEnv>
 getMasterProcedureExecutor()
 
-
+
 MasterProcedureManagerHost
 getMasterProcedureManagerHost()
 
-
+
 MasterQuotaManager
 getMasterQuotaManager()
 
-
+
 MasterWalManager
 getMasterWalManager()
 
-
+
 RegionNormalizer
 getRegionNormalizer()
 
-
+
 ReplicationPeerConfig
 getReplicationPeerConfig(String peerId)
 Returns the configured ReplicationPeerConfig for the 
specified peer
 
 
-
+
 ServerManager
 getServerManager()
 
-
+
 SnapshotManager
 getSnapshotManager()
 
-
+
 TableDescriptors
 getTableDescriptors()
 
-
+
 TableStateManager
 getTableStateManager()
 
-
+
 boolean
 isActiveMaster()
 
-
+
 boolean
 isInitialized()
 
-
+
 boolean
 isInMaintenanceMode()
 
-
+
 boolean
 isServerCrashProcessingEnabled()
 
-
+
+boolean
+isSplitOrMergeEnabled(MasterSwitchType switchType)
+
+
 boolean
 isStopping()
 
-
+
 List<ServerName>
 listDrainingRegionServers()
 List region servers marked as draining to not get 
additional regions assigned to them.
 
 
-
+
 List<LockInfo>
 listLocks()
 List locks
 
 
-
+
 List<ProcedureInfo>
 listProcedures()
 List procedures
 
 
-
+
 List<ReplicationPeerDescription>
 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dab57116/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 2aeabf2..8efbcf5 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -121,28 +121,23 @@
 
 io.netty.channel.ChannelInboundHandlerAdapter (implements io.netty.channel.ChannelInboundHandler)
 
-io.netty.handler.codec.ByteToMessageDecoder
-
-org.apache.hadoop.hbase.ipc.NettyRpcServer.ConnectionHeaderHandler
-
-
 io.netty.channel.ChannelDuplexHandler (implements io.netty.channel.ChannelOutboundHandler)
 
 org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler
 org.apache.hadoop.hbase.ipc.NettyRpcDuplexHandler
 
 
-io.netty.channel.ChannelInitializer<C>
+org.apache.hadoop.hbase.ipc.NettyRpcServerRequestDecoder
+io.netty.channel.SimpleChannelInboundHandler<I>
 
-org.apache.hadoop.hbase.ipc.NettyRpcServer.Initializer
+org.apache.hadoop.hbase.ipc.NettyRpcServerPreambleHandler
 
 
-org.apache.hadoop.hbase.ipc.NettyRpcServer.MessageDecoder
 
 
 io.netty.channel.ChannelOutboundHandlerAdapter (implements io.netty.channel.ChannelOutboundHandler)
 
-org.apache.hadoop.hbase.ipc.NettyRpcServer.MessageEncoder
+org.apache.hadoop.hbase.ipc.NettyRpcServerResponseEncoder
 
 
 
@@ -163,7 +158,6 @@
 
 org.apache.hadoop.hbase.ipc.MetricsHBaseServerWrapperImpl (implements org.apache.hadoop.hbase.ipc.MetricsHBaseServerWrapper)
 org.apache.hadoop.hbase.ipc.NettyRpcClientConfigHelper
-org.apache.hadoop.hbase.ipc.NettyRpcServer.CallWriteListener (implements io.netty.channel.ChannelFutureListener)
 org.apache.hadoop.hbase.ipc.RpcClientFactory
 org.apache.hadoop.hbase.ipc.RpcConnection
 
@@ -207,7 +201,7 @@
 
 org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface
 org.apache.hadoop.hbase.ipc.RpcServerFactory
-org.apache.hadoop.hbase.ipc.ServerCall<T> (implements org.apache.hadoop.hbase.ipc.RpcCall)
+org.apache.hadoop.hbase.ipc.ServerCall<T> (implements org.apache.hadoop.hbase.ipc.RpcCall, org.apache.hadoop.hbase.ipc.RpcResponse)
 
 org.apache.hadoop.hbase.ipc.NettyServerCall
 org.apache.hadoop.hbase.ipc.SimpleServerCall
@@ -333,6 +327,7 @@
 org.apache.hadoop.hbase.ipc.HBaseRpcController (also extends 
org.apache.hadoop.hbase.CellScannable)
 
 
+org.apache.hadoop.hbase.ipc.RpcResponse
 org.apache.hadoop.hbase.ipc.RpcServer.CallCleanup
 org.apache.hadoop.hbase.ipc.RpcServerInterface
 
@@ -346,8 +341,8 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.ipc.MetricsHBaseServerSourceFactoryImpl.SourceStorage
 org.apache.hadoop.hbase.ipc.CallEvent.Type
+org.apache.hadoop.hbase.ipc.MetricsHBaseServerSourceFactoryImpl.SourceStorage
 org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallAction
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dab57116/devapidocs/org/apache/hadoop/hbase/ipc/package-use.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-use.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/package-use.html
index dc4388d..6aa27d3 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-use.html
@@ -361,72 +361,77 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 RpcExecutor.QueueBalancer
 
 
+RpcResponse
+An interface representing the response of an RPC call.
+
+
+
 RpcScheduler
 An interface for RPC request scheduling algorithm.
 
 
-
+
 RpcScheduler.Context
 Exposes runtime information of a RpcServer that a RpcScheduler may need.
 
 
-
+
 RpcServer
 An RPC server that hosts protobuf described Services.
 
 
-
+
 RpcServer.BlockingServiceAndInterface
 Datastructure for passing a BlockingService and its associated class of protobuf service interface.
 
 
-
+
 RpcServer.CallCleanup
 
-
+
 RpcServerInterface
 
-
+
 ServerCall
 Data structure that holds everything necessary for a method invocation and that afterward carries the result.
 
 
-
+
 ServerRpcConnection
 Reads calls from a connection and queues them for handling.
 
 
-
+
 SimpleRpcServer
 The RPC server with native java NIO implementation deriving from Hadoop to host protobuf described Services.
 
 
-
+
 SimpleRpcServer.ConnectionManager
 
-
+
 SimpleRpcServer.Listener
 Listens on the socket.
 
 
-
+
 SimpleRpcServer.Listener.Reader
 
-
+
 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c635e71b/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html
index bc14b2e..547dec5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html
@@ -139,2012 +139,2013 @@
 131    int retry = conf.getInt("zookeeper.recovery.retry", 3);
 132    int retryIntervalMillis =
 133      conf.getInt("zookeeper.recovery.retry.intervalmill", 1000);
-134    zkDumpConnectionTimeOut = conf.getInt("zookeeper.dump.connection.timeout",
-135      1000);
-136    return new RecoverableZooKeeper(ensemble, timeout, watcher,
-137      retry, retryIntervalMillis, identifier);
-138  }
-139
-140  /**
-141   * Log in the current zookeeper server process using the given configuration
-142   * keys for the credential file and login principal.
-143   *
-144   * <p><strong>This is only applicable when running on secure hbase</strong>
-145   * On regular HBase (without security features), this will safely be ignored.
-146   * </p>
-147   *
-148   * @param conf The configuration data to use
-149   * @param keytabFileKey Property key used to configure the path to the credential file
-150   * @param userNameKey Property key used to configure the login principal
-151   * @param hostname Current hostname to use in any credentials
-152   * @throws IOException underlying exception from SecurityUtil.login() call
-153   */
-154  public static void loginServer(Configuration conf, String keytabFileKey,
-155      String userNameKey, String hostname) throws IOException {
-156    login(conf, keytabFileKey, userNameKey, hostname,
-157      ZooKeeperSaslServer.LOGIN_CONTEXT_NAME_KEY,
-158      JaasConfiguration.SERVER_KEYTAB_KERBEROS_CONFIG_NAME);
-159  }
-160
-161  /**
-162   * Log in the current zookeeper client using the given configuration
-163   * keys for the credential file and login principal.
-164   *
-165   * <p><strong>This is only applicable when running on secure hbase</strong>
-166   * On regular HBase (without security features), this will safely be ignored.
-167   * </p>
-168   *
-169   * @param conf The configuration data to use
-170   * @param keytabFileKey Property key used to configure the path to the credential file
-171   * @param userNameKey Property key used to configure the login principal
-172   * @param hostname Current hostname to use in any credentials
-173   * @throws IOException underlying exception from SecurityUtil.login() call
-174   */
-175  public static void loginClient(Configuration conf, String keytabFileKey,
-176      String userNameKey, String hostname) throws IOException {
-177    login(conf, keytabFileKey, userNameKey, hostname,
-178      ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY,
-179      JaasConfiguration.CLIENT_KEYTAB_KERBEROS_CONFIG_NAME);
-180  }
-181
-182  /**
-183   * Log in the current process using the given configuration keys for the
-184   * credential file and login principal.
-185   *
-186   * <p><strong>This is only applicable when running on secure hbase</strong>
-187   * On regular HBase (without security features), this will safely be ignored.
-188   * </p>
-189   *
-190   * @param conf The configuration data to use
-191   * @param keytabFileKey Property key used to configure the path to the credential file
-192   * @param userNameKey Property key used to configure the login principal
-193   * @param hostname Current hostname to use in any credentials
-194   * @param loginContextProperty property name to expose the entry name
-195   * @param loginContextName jaas entry name
-196   * @throws IOException underlying exception from SecurityUtil.login() call
-197   */
-198  private static void login(Configuration conf, String keytabFileKey,
-199      String userNameKey, String hostname,
-200      String loginContextProperty, String loginContextName)
-201      throws IOException {
-202    if (!isSecureZooKeeper(conf))
-203      return;
-204
-205    // User has specified a jaas.conf, keep this one as the good one.
-206    // HBASE_OPTS="-Djava.security.auth.login.config=jaas.conf"
-207    if (System.getProperty("java.security.auth.login.config") != null)
-208      return;
-209
-210    // No keytab specified, no auth
-211    String keytabFilename = conf.get(keytabFileKey);
-212    if (keytabFilename == null) {
-213      LOG.warn("no keytab specified for: " + keytabFileKey);
-214      return;
-215    }
-216
-217    String principalConfig = conf.get(userNameKey, System.getProperty("user.name"));
-218    String principalName = SecurityUtil.getServerPrincipal(principalConfig, hostname);
-219
-220    // Initialize the "jaas.conf" for keyTab/principal,
-221    // If keyTab is not 
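For orientation, here is a hedged sketch of how a process would call the client-side login helper shown above before opening a ZooKeeper connection. The configuration key names mirror the standard HBase ZooKeeper client keys, but treat them as assumptions; on a non-secure cluster the call is a no-op.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;

// Sketch: SASL login for the ZooKeeper client process.
public final class ZkClientLogin {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Key names are assumptions based on common HBase configuration.
    ZKUtil.loginClient(conf,
        "hbase.zookeeper.client.keytab.file",        // credential file path key
        "hbase.zookeeper.client.kerberos.principal", // login principal key
        "zkclient-host.example.com");                // hostname used in credentials
  }
}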

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b4cf63f/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html 
b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
index d0c8417..b00962d 100644
--- a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
@@ -337,13 +337,13 @@ extends org.jamon.AbstractTemplateProxy
 
 
 Field Detail
-
+
 
 
 
 
-filter
-protected String filter
+catalogJanitorEnabled
+protected boolean catalogJanitorEnabled
 
 
 
@@ -355,13 +355,13 @@ extends org.jamon.AbstractTemplateProxy
 protected Map<String,Integer> frags
 
 
-
+
 
 
 
 
-format
-protected String format
+servers
+protected List<ServerName> servers
 
 
 
@@ -373,13 +373,13 @@ extends org.jamon.AbstractTemplateProxy
 protected AssignmentManager assignmentManager
 
 
-
+
 
 
 
 
-metaLocation
-protected ServerName metaLocation
+format
+protected String format
 
 
 
@@ -391,13 +391,13 @@ extends org.jamon.AbstractTemplateProxy
 protected Set<ServerName> deadServers
 
 
-
+
 
 
 
 
-servers
-protected List<ServerName> servers
+metaLocation
+protected ServerName metaLocation
 
 
 
@@ -409,13 +409,13 @@ extends org.jamon.AbstractTemplateProxy
 protected ServerManager serverManager
 
 
-
+
 
 
 
 
-catalogJanitorEnabled
-protected boolean catalogJanitorEnabled
+filter
+protected String filter
 
 
 
@@ -487,13 +487,13 @@ extends org.jamon.AbstractTemplateProxy
 
 
 
-
+
 
 
 
 
-setFilter
-public final MasterStatusTmpl setFilter(String p_filter)
+setCatalogJanitorEnabled
+public final MasterStatusTmpl setCatalogJanitorEnabled(boolean p_catalogJanitorEnabled)
 
 
 
@@ -505,13 +505,13 @@ extends org.jamon.AbstractTemplateProxy
 public final MasterStatusTmpl setFrags(Map<String,Integer> p_frags)
 
 
-
+
 
 
 
 
-setFormat
-public final MasterStatusTmpl setFormat(String p_format)
+setServers
+public final MasterStatusTmpl setServers(List<ServerName> p_servers)
 
 
 
@@ -523,13 +523,13 @@ extends org.jamon.AbstractTemplateProxy
 public final MasterStatusTmpl setAssignmentManager(AssignmentManager p_assignmentManager)
 
 
-
+
 
 
 
 
-setMetaLocation
-public final MasterStatusTmpl setMetaLocation(ServerName p_metaLocation)
+setFormat
+public final MasterStatusTmpl setFormat(String p_format)
 
 
 
@@ -541,13 +541,13 @@ extends org.jamon.AbstractTemplateProxy
 public final MasterStatusTmpl setDeadServers(Set<ServerName> p_deadServers)
 
 
-
+
 
 
 
 
-setServers
-public final MasterStatusTmpl setServers(List<ServerName> p_servers)
+setMetaLocation
+public 
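Taken together, the setters above are chainable mutators on the Jamon template proxy. A hedged usage sketch follows; the render signature and the argument values are assumptions based on the Jamon proxy pattern, not taken from this diff.

import java.io.IOException;
import java.io.StringWriter;
import java.util.Collections;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.tmpl.master.MasterStatusTmpl;

final class StatusPageSketch {
  // Sketch: render the master status page with a handful of optional arguments.
  static String render(HMaster master) throws IOException {
    StringWriter out = new StringWriter();
    new MasterStatusTmpl()
        .setCatalogJanitorEnabled(true)          // optional arguments are chainable
        .setFilter("general")
        .setFormat("html")
        .setServers(Collections.emptyList())     // List<ServerName>
        .setDeadServers(Collections.emptySet())  // Set<ServerName>
        .render(out, master);                    // HMaster is the one required argument
    return out.toString();
  }
}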

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f55ebeaa/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.Connection.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.Connection.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.Connection.html
deleted file mode 100644
index 7f61b54..000
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.Connection.html
+++ /dev/null
@@ -1,1446 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<html lang="en">
-<head>
-<title>Source code</title>
-</head>
-<body>
-<div class="sourceContainer">
-001/**
-002 * Licensed to the Apache Software Foundation (ASF) under one
-003 * or more contributor license agreements.  See the NOTICE file
-004 * distributed with this work for additional information
-005 * regarding copyright ownership.  The ASF licenses this file
-006 * to you under the Apache License, Version 2.0 (the
-007 * "License"); you may not use this file except in compliance
-008 * with the License.  You may obtain a copy of the License at
-009 *
-010 *     http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or agreed to in writing, software
-013 * distributed under the License is distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-015 * See the License for the specific language governing permissions and
-016 * limitations under the License.
-017 */
-018
-019package org.apache.hadoop.hbase.ipc;
-020
-021import java.io.IOException;
-022import java.io.InputStream;
-023import java.net.BindException;
-024import java.net.InetAddress;
-025import java.net.InetSocketAddress;
-026import java.net.ServerSocket;
-027import java.net.Socket;
-028import java.net.SocketException;
-029import java.net.UnknownHostException;
-030import java.nio.ByteBuffer;
-031import 
java.nio.channels.CancelledKeyException;
-032import 
java.nio.channels.ClosedChannelException;
-033import 
java.nio.channels.GatheringByteChannel;
-034import 
java.nio.channels.ReadableByteChannel;
-035import java.nio.channels.SelectionKey;
-036import java.nio.channels.Selector;
-037import 
java.nio.channels.ServerSocketChannel;
-038import java.nio.channels.SocketChannel;
-039import java.util.ArrayList;
-040import java.util.Arrays;
-041import java.util.Collections;
-042import java.util.Iterator;
-043import java.util.List;
-044import java.util.Set;
-045import java.util.Timer;
-046import java.util.TimerTask;
-047import 
java.util.concurrent.ConcurrentHashMap;
-048import 
java.util.concurrent.ConcurrentLinkedDeque;
-049import 
java.util.concurrent.ExecutorService;
-050import java.util.concurrent.Executors;
-051import 
java.util.concurrent.LinkedBlockingQueue;
-052import 
java.util.concurrent.atomic.AtomicInteger;
-053import 
java.util.concurrent.atomic.LongAdder;
-054import java.util.concurrent.locks.Lock;
-055import 
java.util.concurrent.locks.ReentrantLock;
-056
-057import 
org.apache.hadoop.conf.Configuration;
-058import 
org.apache.hadoop.hbase.CellScanner;
-059import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-060import 
org.apache.hadoop.hbase.HBaseIOException;
-061import 
org.apache.hadoop.hbase.HConstants;
-062import org.apache.hadoop.hbase.Server;
-063import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-064import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-065import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-066import 
org.apache.hadoop.hbase.exceptions.RequestTooBigException;
-067import 
org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
-068import 
org.apache.hadoop.hbase.nio.ByteBuff;
-069import 
org.apache.hadoop.hbase.nio.SingleByteBuff;
-070import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-071import 
org.apache.hadoop.hbase.security.AuthMethod;
-072import 
org.apache.hadoop.hbase.security.HBasePolicyProvider;
-073import 
org.apache.hadoop.hbase.security.SaslStatus;
-074import 
org.apache.hadoop.hbase.security.SaslUtil;
-075import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
-076import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream;
-077import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
-078import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
-081import 
org.apache.hadoop.hbase.util.Bytes;
-082import 
org.apache.hadoop.hbase.util.Pair;
-083import 
org.apache.hadoop.hbase.util.Threads;
-084import org.apache.hadoop.io.IOUtils;
-085import 
org.apache.hadoop.io.IntWritable;
-086import 
org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
-087import 
org.apache.hadoop.util.StringUtils;
-088import org.apache.htrace.TraceInfo;
-089
-090import 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e0a5167/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
index 7e37ca0..79c65e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
@@ -70,1527 +70,1525 @@
 062import com.google.common.collect.Sets;
 063
 064/**
-065 * The base class for load balancers. It provides the the functions used to by
-066 * {@link org.apache.hadoop.hbase.master.AssignmentManager} to assign regions
-067 * in the edge cases. It doesn't provide an implementation of the
-068 * actual balancing algorithm.
-069 *
-070 */
-071public abstract class BaseLoadBalancer implements LoadBalancer {
-072  protected static final int MIN_SERVER_BALANCE = 2;
-073  private volatile boolean stopped = false;
+065 * The base class for load balancers. It provides functions used by
+066 * {@link org.apache.hadoop.hbase.master.AssignmentManager} to assign regions in the edge cases.
+067 * It doesn't provide an implementation of the actual balancing algorithm.
+068 */
+069public abstract class BaseLoadBalancer implements LoadBalancer {
+070  protected static final int MIN_SERVER_BALANCE = 2;
+071  private volatile boolean stopped = false;
+072
+073  private static final List<HRegionInfo> EMPTY_REGION_LIST = new ArrayList<>(0);
 074
-075  private static final List<HRegionInfo> EMPTY_REGION_LIST = new ArrayList<>(0);
-076
-077  static final Predicate<ServerLoad> IDLE_SERVER_PREDICATOR
-078    = load -> load.getNumberOfRegions() == 0;
+075  static final Predicate<ServerLoad> IDLE_SERVER_PREDICATOR
+076    = load -> load.getNumberOfRegions() == 0;
+077
+078  protected final RegionLocationFinder regionFinder = new RegionLocationFinder();
 079
-080  protected final RegionLocationFinder regionFinder = new RegionLocationFinder();
-081
-082  private static class DefaultRackManager extends RackManager {
-083    @Override
-084    public String getRack(ServerName server) {
-085      return UNKNOWN_RACK;
-086    }
-087  }
-088
-089  /**
-090   * The constructor that uses the basic MetricsBalancer
-091   */
-092  protected BaseLoadBalancer() {
-093    metricsBalancer = new MetricsBalancer();
-094  }
-095
-096  /**
-097   * This Constructor accepts an instance of MetricsBalancer,
-098   * which will be used instead of creating a new one
-099   */
-100  protected BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-101    this.metricsBalancer = (metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-102  }
-103
-104  /**
-105   * An efficient array based implementation similar to ClusterState for keeping
-106   * the status of the cluster in terms of region assignment and distribution.
-107   * LoadBalancers, such as StochasticLoadBalancer uses this Cluster object because of
-108   * hundreds of thousands of hashmap manipulations are very costly, which is why this
-109   * class uses mostly indexes and arrays.
-110   *
-111   * Cluster tracks a list of unassigned regions, region assignments, and the server
-112   * topology in terms of server names, hostnames and racks.
-113   */
-114  protected static class Cluster {
-115    ServerName[] servers;
-116    String[] hosts; // ServerName uniquely identifies a region server. multiple RS can run on the same host
-117    String[] racks;
-118    boolean multiServersPerHost = false; // whether or not any host has more than one server
-119
-120    ArrayList<String> tables;
-121    HRegionInfo[] regions;
-122    Deque<BalancerRegionLoad>[] regionLoads;
-123    private RegionLocationFinder regionFinder;
+080  private static class DefaultRackManager extends RackManager {
+081    @Override
+082    public String getRack(ServerName server) {
+083      return UNKNOWN_RACK;
+084    }
+085  }
+086
+087  /**
+088   * The constructor that uses the basic MetricsBalancer
+089   */
+090  protected BaseLoadBalancer() {
+091    metricsBalancer = new MetricsBalancer();
+092  }
+093
+094  /**
+095   * This Constructor accepts an instance of MetricsBalancer,
+096   * which will be used instead of creating a new one
+097   */
+098  protected BaseLoadBalancer(MetricsBalancer metricsBalancer) {
+099    this.metricsBalancer = (metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
+100  }
+101
+102  /**
+103   * An efficient array based implementation similar to ClusterState for keeping
+104   * the status of the cluster in terms of region assignment and distribution.
+105   * LoadBalancers, such as StochasticLoadBalancer uses this 
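As a hedged illustration of the index-and-array encoding this Cluster javadoc describes (and not the actual HBase class), the following toy sketch shows why the balancer prefers dense integer indexes over repeated map lookups; all names here are invented.

// Toy model of the Cluster idea: map servers/regions to int indexes once,
// then answer balancing queries with plain array scans instead of hashmaps.
final class ToyCluster {
  final String[] servers;        // server index -> server name
  final int[] regionToServer;    // region index -> hosting server index, -1 if unassigned

  ToyCluster(String[] servers, int[] regionToServer) {
    this.servers = servers;
    this.regionToServer = regionToServer;
  }

  int regionCount(int serverIndex) {
    int n = 0;
    for (int owner : regionToServer) {
      if (owner == serverIndex) n++;
    }
    return n;
  }

  boolean isIdle(int serverIndex) {
    return regionCount(serverIndex) == 0; // mirrors IDLE_SERVER_PREDICATOR above
  }
}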

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/709b8fcc/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
index f2c44db..6cf2fc8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
@@ -2581,7 +2581,7 @@
 2573    try {
 2574      // Restore snapshot
 2575      get(
-2576        internalRestoreSnapshotAsync(snapshotName, tableName, false),
+2576        internalRestoreSnapshotAsync(snapshotName, tableName),
 2577        syncWaitTimeout,
 2578        TimeUnit.MILLISECONDS);
 2579    } catch (IOException e) {
@@ -2590,7 +2590,7 @@
 2582      if (takeFailSafeSnapshot) {
 2583        try {
 2584          get(
-2585            internalRestoreSnapshotAsync(failSafeSnapshotSnapshotName, tableName, false),
+2585            internalRestoreSnapshotAsync(failSafeSnapshotSnapshotName, tableName),
 2586            syncWaitTimeout,
 2587            TimeUnit.MILLISECONDS);
 2588          String msg = "Restore snapshot=" + snapshotName +
@@ -2633,7 +2633,7 @@
 2625      throw new TableNotDisabledException(tableName);
 2626    }
 2627
-2628    return internalRestoreSnapshotAsync(snapshotName, tableName, false);
+2628    return internalRestoreSnapshotAsync(snapshotName, tableName);
 2629  }
 2630
 2631  @Override
@@ -2643,1621 +2643,1614 @@
 2635  }
 2636
 2637  @Override
-2638  public void cloneSnapshot(String snapshotName, TableName tableName, boolean restoreAcl)
+2638  public void cloneSnapshot(final String snapshotName, final TableName tableName)
 2639      throws IOException, TableExistsException, RestoreSnapshotException {
 2640    if (tableExists(tableName)) {
 2641      throw new TableExistsException(tableName);
 2642    }
 2643    get(
-2644      internalRestoreSnapshotAsync(snapshotName, tableName, restoreAcl),
+2644      internalRestoreSnapshotAsync(snapshotName, tableName),
 2645      Integer.MAX_VALUE,
 2646      TimeUnit.MILLISECONDS);
 2647  }
 2648
 2649  @Override
-2650  public void cloneSnapshot(final String snapshotName, final TableName tableName)
-2651      throws IOException, TableExistsException, RestoreSnapshotException {
-2652    cloneSnapshot(snapshotName, tableName, false);
-2653  }
-2654
-2655  @Override
-2656  public Future<Void> cloneSnapshotAsync(final String snapshotName, final TableName tableName)
-2657      throws IOException, TableExistsException {
-2658    if (tableExists(tableName)) {
-2659      throw new TableExistsException(tableName);
-2660    }
-2661    return internalRestoreSnapshotAsync(snapshotName, tableName, false);
-2662  }
-2663
-2664  @Override
-2665  public byte[] execProcedureWithRet(String signature, String instance, Map<String, String> props)
-2666      throws IOException {
-2667    ProcedureDescription desc = ProtobufUtil.buildProcedureDescription(signature, instance, props);
-2668    final ExecProcedureRequest request =
-2669        ExecProcedureRequest.newBuilder().setProcedure(desc).build();
-2670    // run the procedure on the master
-2671    ExecProcedureResponse response = executeCallable(
-2672      new MasterCallable<ExecProcedureResponse>(getConnection(), getRpcControllerFactory()) {
-2673        @Override
-2674        protected ExecProcedureResponse rpcCall() throws Exception {
-2675          return master.execProcedureWithRet(getRpcController(), request);
-2676        }
-2677      });
-2678
-2679    return response.hasReturnData() ? response.getReturnData().toByteArray() : null;
-2680  }
-2681
-2682  @Override
-2683  public void execProcedure(String signature, String instance, Map<String, String> props)
-2684      throws IOException {
-2685    ProcedureDescription desc = ProtobufUtil.buildProcedureDescription(signature, instance, props);
-2686    final ExecProcedureRequest request =
-2687        ExecProcedureRequest.newBuilder().setProcedure(desc).build();
-2688    // run the procedure on the master
-2689    ExecProcedureResponse response = executeCallable(new MasterCallable<ExecProcedureResponse>(
-2690        getConnection(), getRpcControllerFactory()) {
-2691      @Override
-2692      protected ExecProcedureResponse rpcCall() throws Exception {
-2693        return master.execProcedure(getRpcController(), request);
-2694      }
-2695    });
-2696
-2697    long start = EnvironmentEdgeManager.currentTime();
-2698    long max = response.getExpectedTimeout();
-2699    long maxPauseTime = max / this.numRetries;
-2700    int tries = 0;
-2701    LOG.debug("Waiting a max of " + max + " ms for procedure '" +
-2702        signature + " : " + instance + "'' to complete. (max " + maxPauseTime + " ms per retry)");

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1241ee85/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
index e9107e6..1f0030b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
@@ -69,15 +69,15 @@
 061  requiredArguments = {
 062    @org.jamon.annotations.Argument(name = "master", type = "HMaster")},
 063  optionalArguments = {
-064    @org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager"),
-065    @org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"),
-066    @org.jamon.annotations.Argument(name = "deadServers", type = "Set<ServerName>"),
+064    @org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean"),
+065    @org.jamon.annotations.Argument(name = "filter", type = "String"),
+066    @org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager"),
 067    @org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager"),
-068    @org.jamon.annotations.Argument(name = "frags", type = "Map<String,Integer>"),
+068    @org.jamon.annotations.Argument(name = "deadServers", type = "Set<ServerName>"),
 069    @org.jamon.annotations.Argument(name = "servers", type = "List<ServerName>"),
-070    @org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean"),
-071    @org.jamon.annotations.Argument(name = "filter", type = "String"),
-072    @org.jamon.annotations.Argument(name = "format", type = "String")})
+070    @org.jamon.annotations.Argument(name = "format", type = "String"),
+071    @org.jamon.annotations.Argument(name = "frags", type = "Map<String,Integer>"),
+072    @org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName")})
 073public class MasterStatusTmpl
 074  extends org.jamon.AbstractTemplateProxy
 075{
@@ -118,57 +118,57 @@
 110      return m_master;
 111    }
 112    private HMaster m_master;
-113    // 28, 1
-114    public void setServerManager(ServerManager serverManager)
+113    // 25, 1
+114    public void setCatalogJanitorEnabled(boolean catalogJanitorEnabled)
 115    {
-116      // 28, 1
-117      m_serverManager = serverManager;
-118      m_serverManager__IsNotDefault = true;
+116      // 25, 1
+117      m_catalogJanitorEnabled = catalogJanitorEnabled;
+118      m_catalogJanitorEnabled__IsNotDefault = true;
 119    }
-120    public ServerManager getServerManager()
+120    public boolean getCatalogJanitorEnabled()
 121    {
-122      return m_serverManager;
+122      return m_catalogJanitorEnabled;
 123    }
-124    private ServerManager m_serverManager;
-125    public boolean getServerManager__IsNotDefault()
+124    private boolean m_catalogJanitorEnabled;
+125    public boolean getCatalogJanitorEnabled__IsNotDefault()
 126    {
-127      return m_serverManager__IsNotDefault;
+127      return m_catalogJanitorEnabled__IsNotDefault;
 128    }
-129    private boolean m_serverManager__IsNotDefault;
-130    // 22, 1
-131    public void setMetaLocation(ServerName metaLocation)
+129    private boolean m_catalogJanitorEnabled__IsNotDefault;
+130    // 26, 1
+131    public void setFilter(String filter)
 132    {
-133      // 22, 1
-134      m_metaLocation = metaLocation;
-135      m_metaLocation__IsNotDefault = true;
+133      // 26, 1
+134      m_filter = filter;
+135      m_filter__IsNotDefault = true;
 136    }
-137    public ServerName getMetaLocation()
+137    public String getFilter()
 138    {
-139      return m_metaLocation;
+139      return m_filter;
 140    }
-141    private ServerName m_metaLocation;
-142    public boolean getMetaLocation__IsNotDefault()
+141    private String m_filter;
+142    public boolean getFilter__IsNotDefault()
 143    {
-144      return m_metaLocation__IsNotDefault;
+144      return m_filter__IsNotDefault;
 145    }
-146    private boolean m_metaLocation__IsNotDefault;
-147    // 24, 1
-148    public void setDeadServers(Set<ServerName> deadServers)
+146    private boolean m_filter__IsNotDefault;
+147    // 28, 1
+148    public void setServerManager(ServerManager serverManager)
 149    {
-150      // 24, 1
-151      m_deadServers = deadServers;
-152      m_deadServers__IsNotDefault = true;
+150      // 28, 1
+151      m_serverManager = serverManager;
+152      m_serverManager__IsNotDefault = true;
 153    }
-154    public Set<ServerName> getDeadServers()
+154    public ServerManager getServerManager()
 155    {
-156      return m_deadServers;
+156      return m_serverManager;
 157    }
-158    private Set<ServerName> m_deadServers;
-159    public boolean getDeadServers__IsNotDefault()
[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dd7176bf/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
index 8b22aa1..f2c44db 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
@@ -100,4135 +100,4164 @@
 092import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
 094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest;
-102import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse;
-103import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest;
-104import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
-105import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-106import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
-107import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
-108import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-109import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;
-110import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-137import 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7ef4c5a9/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.Call.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.Call.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.Call.html
deleted file mode 100644
index 2987e7b..000
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.Call.html
+++ /dev/null
@@ -1,1500 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<html lang="en">
-<head>
-<title>Source code</title>
-</head>
-<body>
-<div class="sourceContainer">
-001/**
-002 * Licensed to the Apache Software Foundation (ASF) under one
-003 * or more contributor license agreements.  See the NOTICE file
-004 * distributed with this work for additional information
-005 * regarding copyright ownership.  The ASF licenses this file
-006 * to you under the Apache License, Version 2.0 (the
-007 * "License"); you may not use this file except in compliance
-008 * with the License.  You may obtain a copy of the License at
-009 *
-010 *     http://www.apache.org/licenses/LICENSE-2.0
-011 *
-012 * Unless required by applicable law or agreed to in writing, software
-013 * distributed under the License is distributed on an "AS IS" BASIS,
-014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-015 * See the License for the specific language governing permissions and
-016 * limitations under the License.
-017 */
-018
-019package org.apache.hadoop.hbase.ipc;
-020
-021import java.io.IOException;
-022import java.io.InputStream;
-023import java.net.BindException;
-024import java.net.InetAddress;
-025import java.net.InetSocketAddress;
-026import java.net.ServerSocket;
-027import java.net.Socket;
-028import java.net.SocketException;
-029import java.net.UnknownHostException;
-030import java.nio.ByteBuffer;
-031import 
java.nio.channels.CancelledKeyException;
-032import 
java.nio.channels.ClosedChannelException;
-033import 
java.nio.channels.GatheringByteChannel;
-034import 
java.nio.channels.ReadableByteChannel;
-035import java.nio.channels.SelectionKey;
-036import java.nio.channels.Selector;
-037import 
java.nio.channels.ServerSocketChannel;
-038import java.nio.channels.SocketChannel;
-039import java.util.ArrayList;
-040import java.util.Arrays;
-041import java.util.Collections;
-042import java.util.Iterator;
-043import java.util.List;
-044import java.util.Set;
-045import java.util.Timer;
-046import java.util.TimerTask;
-047import 
java.util.concurrent.ConcurrentHashMap;
-048import 
java.util.concurrent.ConcurrentLinkedDeque;
-049import 
java.util.concurrent.ExecutorService;
-050import java.util.concurrent.Executors;
-051import 
java.util.concurrent.LinkedBlockingQueue;
-052import 
java.util.concurrent.atomic.AtomicInteger;
-053import 
java.util.concurrent.atomic.LongAdder;
-054import java.util.concurrent.locks.Lock;
-055import 
java.util.concurrent.locks.ReentrantLock;
-056
-057import 
org.apache.hadoop.conf.Configuration;
-058import 
org.apache.hadoop.hbase.CellScanner;
-059import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-060import 
org.apache.hadoop.hbase.HBaseIOException;
-061import 
org.apache.hadoop.hbase.HConstants;
-062import org.apache.hadoop.hbase.Server;
-063import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-064import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-065import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-066import 
org.apache.hadoop.hbase.exceptions.RequestTooBigException;
-067import 
org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
-068import 
org.apache.hadoop.hbase.nio.ByteBuff;
-069import 
org.apache.hadoop.hbase.nio.SingleByteBuff;
-070import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-071import 
org.apache.hadoop.hbase.security.AuthMethod;
-072import 
org.apache.hadoop.hbase.security.HBasePolicyProvider;
-073import 
org.apache.hadoop.hbase.security.SaslStatus;
-074import 
org.apache.hadoop.hbase.security.SaslUtil;
-075import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
-076import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream;
-077import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
-078import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
-081import 
org.apache.hadoop.hbase.util.Bytes;
-082import 
org.apache.hadoop.hbase.util.Pair;
-083import 
org.apache.hadoop.hbase.util.Threads;
-084import org.apache.hadoop.io.IOUtils;
-085import 
org.apache.hadoop.io.IntWritable;
-086import 
org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
-087import 
org.apache.hadoop.util.StringUtils;
-088import org.apache.htrace.TraceInfo;
-089
-090import 
com.google.common.util.concurrent.ThreadFactoryBuilder;
-091
-092/**
-093 * The 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/31df4674/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
index f3f7a46..8750fa2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.DisableTableProcedureBiConsumer.html
@@ -56,2015 +56,2125 @@
 048import 
org.apache.hadoop.hbase.MetaTableAccessor;
 049import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
 050import 
org.apache.hadoop.hbase.NotServingRegionException;
-051import 
org.apache.hadoop.hbase.RegionLocations;
-052import 
org.apache.hadoop.hbase.ServerName;
-053import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-054import 
org.apache.hadoop.hbase.HConstants;
-055import 
org.apache.hadoop.hbase.TableExistsException;
-056import 
org.apache.hadoop.hbase.TableName;
-057import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-058import 
org.apache.hadoop.hbase.TableNotDisabledException;
-059import 
org.apache.hadoop.hbase.TableNotFoundException;
-060import 
org.apache.hadoop.hbase.UnknownRegionException;
-061import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-062import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-063import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-064import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-065import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-066import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-067import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-068import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-069import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-070import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-071import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-072import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-073import 
org.apache.hadoop.hbase.replication.ReplicationException;
-074import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-075import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-076import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-101import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-102import 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6f2e75f2/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ProcedureBiConsumer.html
index 6c52543..f3f7a46 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.ProcedureBiConsumer.html
@@ -31,1797 +31,2040 @@
 023import java.util.ArrayList;
 024import java.util.Arrays;
 025import java.util.Collection;
-026import java.util.HashMap;
-027import java.util.LinkedList;
-028import java.util.List;
-029import java.util.Map;
-030import java.util.Optional;
-031import 
java.util.concurrent.CompletableFuture;
-032import java.util.concurrent.TimeUnit;
-033import 
java.util.concurrent.atomic.AtomicReference;
-034import java.util.function.BiConsumer;
-035import java.util.regex.Pattern;
-036import java.util.stream.Collectors;
-037
-038import 
com.google.common.annotations.VisibleForTesting;
-039
-040import io.netty.util.Timeout;
-041import io.netty.util.TimerTask;
-042import org.apache.commons.logging.Log;
-043import 
org.apache.commons.logging.LogFactory;
-044import 
org.apache.hadoop.hbase.HColumnDescriptor;
-045import 
org.apache.hadoop.hbase.HRegionInfo;
-046import 
org.apache.hadoop.hbase.HRegionLocation;
-047import 
org.apache.hadoop.hbase.MetaTableAccessor;
-048import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-049import 
org.apache.hadoop.hbase.NotServingRegionException;
-050import 
org.apache.hadoop.hbase.RegionLocations;
-051import 
org.apache.hadoop.hbase.ServerName;
-052import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-053import 
org.apache.hadoop.hbase.HConstants;
-054import 
org.apache.hadoop.hbase.TableExistsException;
-055import 
org.apache.hadoop.hbase.TableName;
-056import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-057import 
org.apache.hadoop.hbase.TableNotFoundException;
-058import 
org.apache.hadoop.hbase.UnknownRegionException;
-059import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-061import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-062import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-063import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-064import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-065import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-066import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-067import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-068import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-069import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-070import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-071import 
org.apache.hadoop.hbase.replication.ReplicationException;
-072import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-073import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-074import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-092import 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/efd0601e/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
index 0306658..b3c0b30 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer
+private class AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer
 extends AsyncHBaseAdmin.TableProcedureBiConsumer
 
 
@@ -240,7 +240,7 @@ extends 
 
 AddColumnFamilyProcedureBiConsumer
-AddColumnFamilyProcedureBiConsumer(AsyncAdminadmin,
+AddColumnFamilyProcedureBiConsumer(AsyncAdminadmin,
TableNametableName)
 
 
@@ -258,7 +258,7 @@ extends 
 
 getOperationType
-String getOperationType()
+String getOperationType()
 
 Specified by:
 getOperationTypein
 classAsyncHBaseAdmin.TableProcedureBiConsumer

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/efd0601e/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
index c63c62f..a359542 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AdminRpcCall.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 http://docs.oracle.com/javase/8/docs/api/java/lang/FunctionalInterface.html?is-external=true;
 title="class or interface in java.lang">@FunctionalInterface
-private static interface AsyncHBaseAdmin.AdminRpcCall<RESP,REQ>
+private static interface AsyncHBaseAdmin.AdminRpcCall<RESP,REQ>
 
 
 
@@ -159,7 +159,7 @@ private static interface 
 
 call
-void call(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.Interface stub,
+void call(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.Interface stub,
    HBaseRpcController controller,
    REQ req,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<RESP> done)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/efd0601e/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
index a189fa9..732bfa3 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.Converter.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @FunctionalInterface
-private static interface AsyncHBaseAdmin.Converter<D,S>
+private static interface AsyncHBaseAdmin.Converter<D,S>
 
 
 
@@ -156,7 +156,7 @@ private static interface 
 
 convert
-D convert(S src)
+D convert(S src)
   throws IOException
 
 Throws:
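
Converter is the companion interface: it maps a raw protobuf response S into the client-facing type D, and may throw IOException while doing so. A minimal stand-alone version; the String-to-Integer example is illustrative only:

import java.io.IOException;

// Stand-alone version of the Converter<D, S> shape shown above.
@FunctionalInterface
interface ConverterSketch<D, S> {
  D convert(S src) throws IOException;
}

class ConverterDemo {
  public static void main(String[] args) throws IOException {
    // S = String (wire form), D = Integer (client-facing value); illustrative only.
    ConverterSketch<Integer, String> parse = Integer::valueOf;
    System.out.println(parse.convert("42")); // prints 42
  }
}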

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/efd0601e/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
index 824821f..5e2583d 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer
+private class AsyncHBaseAdmin.CreateNamespaceProcedureBiConsumer
 extends 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/10601a30/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
index be839b7..72853dd 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
@@ -45,1639 +45,1784 @@
 037
 038import 
com.google.common.annotations.VisibleForTesting;
 039
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.hbase.HColumnDescriptor;
-043import 
org.apache.hadoop.hbase.HRegionInfo;
-044import 
org.apache.hadoop.hbase.HRegionLocation;
-045import 
org.apache.hadoop.hbase.HTableDescriptor;
-046import 
org.apache.hadoop.hbase.MetaTableAccessor;
-047import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-048import 
org.apache.hadoop.hbase.NotServingRegionException;
-049import 
org.apache.hadoop.hbase.RegionLocations;
-050import 
org.apache.hadoop.hbase.ServerName;
-051import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-052import 
org.apache.hadoop.hbase.HConstants;
-053import 
org.apache.hadoop.hbase.TableName;
-054import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-055import 
org.apache.hadoop.hbase.TableNotFoundException;
-056import 
org.apache.hadoop.hbase.UnknownRegionException;
-057import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-058import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-059import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-060import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-061import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-062import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-063import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-064import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-065import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-066import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-067import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-068import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-069import 
org.apache.hadoop.hbase.replication.ReplicationException;
-070import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-071import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-072import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/662ea7dc/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableProcedureBiConsumer.html
index ac4a9b3..be839b7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.TableProcedureBiConsumer.html
@@ -30,212 +30,212 @@
 022import java.io.IOException;
 023import java.util.ArrayList;
 024import java.util.Arrays;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Optional;
-028import 
java.util.concurrent.CompletableFuture;
-029import java.util.concurrent.TimeUnit;
-030import 
java.util.concurrent.atomic.AtomicReference;
-031import java.util.function.BiConsumer;
-032import java.util.regex.Pattern;
-033
-034import 
com.google.common.annotations.VisibleForTesting;
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.hbase.HColumnDescriptor;
-038import 
org.apache.hadoop.hbase.HRegionInfo;
-039import 
org.apache.hadoop.hbase.HRegionLocation;
-040import 
org.apache.hadoop.hbase.HTableDescriptor;
-041import 
org.apache.hadoop.hbase.MetaTableAccessor;
-042import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-043import 
org.apache.hadoop.hbase.NotServingRegionException;
-044import 
org.apache.hadoop.hbase.RegionLocations;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-050import 
org.apache.hadoop.hbase.TableNotFoundException;
-051import 
org.apache.hadoop.hbase.UnknownRegionException;
-052import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-053import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-054import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-055import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-060import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-061import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-085import 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6b4bae59/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileInfo.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileInfo.html
index 37e0513..d399861 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileInfo.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class StoreFileInfo
+public class StoreFileInfo
 extends Object
 Describe a StoreFile (hfile, reference, link)
 
@@ -363,9 +363,13 @@ extends Object
 
 
 StoreFileReader
-open(org.apache.hadoop.fs.FileSystem fs,
+open(org.apache.hadoop.fs.FileSystem fs,
 CacheConfig cacheConf,
-boolean canUseDropBehind)
+boolean canUseDropBehind,
+long readahead,
+boolean isPrimaryReplicaStoreFile,
+java.util.concurrent.atomic.AtomicInteger refCount,
+boolean shared)
 Open a Reader for the StoreFile
 
 
@@ -413,7 +417,7 @@ extends Object
 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -422,7 +426,7 @@ extends Object
 
 
 HFILE_NAME_REGEX
-public static final String HFILE_NAME_REGEX
+public static final String HFILE_NAME_REGEX
 A non-capture group, for hfiles, so that this can be embedded.
 HFiles are uuids ([0-9a-z]+). Bulk loaded hfiles have (_SeqId_[0-9]+_) as suffix.
 The mob del file has (_del) as suffix.
@@ -438,7 +442,7 @@ extends Object
 
 
 HFILE_NAME_PATTERN
-private static final Pattern HFILE_NAME_PATTERN
+private static final Pattern HFILE_NAME_PATTERN
 Regex that will work for hfiles
 
 
@@ -448,7 +452,7 @@ extends Object
 
 
 DELFILE_NAME_REGEX
-public static final String DELFILE_NAME_REGEX
+public static final String DELFILE_NAME_REGEX
 A non-capture group, for del files, so that this can be 
embedded.
  A del file has (_del) as suffix.
 
@@ -463,7 +467,7 @@ extends Object
 
 
 DELFILE_NAME_PATTERN
-private static final Pattern DELFILE_NAME_PATTERN
+private static final Pattern DELFILE_NAME_PATTERN
 Regex that will work for del files
 
 
@@ -473,7 +477,7 @@ extends Object
 
 
 REF_NAME_PATTERN
-private static final Pattern REF_NAME_PATTERN
+private static final Pattern REF_NAME_PATTERN
 Regex that will work for straight reference names 
(hfile.parentEncRegion)
  and hfilelink reference names 
(table=region-hfile.parentEncRegion)
  If reference, then the regex has more than just one group.
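
Taken together, the three descriptions above pin down the store-file naming scheme: plain hfiles, mob del files, and reference names. The patterns below are rough reconstructions from those descriptions, for illustration only; the authoritative regex strings live in StoreFileInfo itself.

import java.util.regex.Pattern;

// Rough reconstructions of the naming rules described above; illustrative only.
class StoreFileNameSketch {
  // hfile: uuid-like token, optional bulk-load "_SeqId_<n>_" suffix
  static final Pattern HFILE = Pattern.compile("[0-9a-z]+(?:_SeqId_[0-9]+_)?");
  // mob del file: an hfile name carrying a "_del" suffix
  static final Pattern DELFILE = Pattern.compile("[0-9a-z]+_del");
  // reference: "hfile.parentEncodedRegion" (more than one group when it is a reference)
  static final Pattern REF = Pattern.compile("([0-9a-z]+(?:_SeqId_[0-9]+_)?)\\.(.+)");

  public static void main(String[] args) {
    System.out.println(HFILE.matcher("c0ffee12ab").matches());        // true
    System.out.println(DELFILE.matcher("c0ffee12ab_del").matches());  // true
    System.out.println(REF.matcher("c0ffee12ab.d41d8cd9").matches()); // true
  }
}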
@@ -487,7 +491,7 @@ extends Object
 
 
 conf
-private org.apache.hadoop.conf.Configuration conf
+private org.apache.hadoop.conf.Configuration conf
 
 
 
@@ -496,7 +500,7 @@ extends Object
 
 
 fs
-private final org.apache.hadoop.fs.FileSystem fs
+private 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2fcc2ae0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index 1934610..a2eb716 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -1480,7 +1480,7 @@
 1472  startServiceThreads();
 1473  startHeapMemoryManager();
 1474  // Call it after starting 
HeapMemoryManager.
-1475  
initializeMemStoreChunkCreator();
+1475  initializeMemStoreChunkPool();
 1476  LOG.info("Serving as " + 
this.serverName +
 1477", RpcServer on " + 
rpcServices.isa +
 1478", sessionid=0x" +
@@ -1500,7 +1500,7 @@
 1492}
 1493  }
 1494
-1495  protected void 
initializeMemStoreChunkCreator() {
+1495  private void 
initializeMemStoreChunkPool() {
 1496if (MemStoreLAB.isEnabled(conf)) {
 1497  // MSLAB is enabled. So initialize 
MemStoreChunkPool
 1498  // By this time, the 
MemstoreFlusher is already initialized. We can get the global limits from
@@ -1514,2158 +1514,2162 @@
 1506  float initialCountPercentage = 
conf.getFloat(MemStoreLAB.CHUNK_POOL_INITIALSIZE_KEY,
 1507  
MemStoreLAB.POOL_INITIAL_SIZE_DEFAULT);
 1508  int chunkSize = 
conf.getInt(MemStoreLAB.CHUNK_SIZE_KEY, MemStoreLAB.CHUNK_SIZE_DEFAULT);
-1509  // init the chunkCreator
-1510  ChunkCreator chunkCreator =
-1511  
ChunkCreator.initialize(chunkSize, offheap, globalMemStoreSize, 
poolSizePercentage,
-1512initialCountPercentage, 
this.hMemManager);
-1513}
-1514  }
-1515
-1516  private void startHeapMemoryManager() 
{
-1517this.hMemManager = 
HeapMemoryManager.create(this.conf, this.cacheFlusher, this,
-1518this.regionServerAccounting);
-1519if (this.hMemManager != null) {
-1520  
this.hMemManager.start(getChoreService());
-1521}
-1522  }
-1523
-1524  private void createMyEphemeralNode() 
throws KeeperException, IOException {
-1525RegionServerInfo.Builder rsInfo = 
RegionServerInfo.newBuilder();
-1526rsInfo.setInfoPort(infoServer != 
null ? infoServer.getPort() : -1);
-1527
rsInfo.setVersionInfo(ProtobufUtil.getVersionInfo());
-1528byte[] data = 
ProtobufUtil.prependPBMagic(rsInfo.build().toByteArray());
-1529
ZKUtil.createEphemeralNodeAndWatch(this.zooKeeper,
-1530  getMyEphemeralNodePath(), data);
-1531  }
-1532
-1533  private void deleteMyEphemeralNode() 
throws KeeperException {
-1534ZKUtil.deleteNode(this.zooKeeper, 
getMyEphemeralNodePath());
-1535  }
-1536
-1537  @Override
-1538  public RegionServerAccounting 
getRegionServerAccounting() {
-1539return regionServerAccounting;
-1540  }
-1541
-1542  /*
-1543   * @param r Region to get RegionLoad 
for.
-1544   * @param regionLoadBldr the 
RegionLoad.Builder, can be null
-1545   * @param regionSpecifier the 
RegionSpecifier.Builder, can be null
-1546   * @return RegionLoad instance.
-1547   *
-1548   * @throws IOException
-1549   */
-1550  RegionLoad createRegionLoad(final 
Region r, RegionLoad.Builder regionLoadBldr,
-1551  RegionSpecifier.Builder 
regionSpecifier) throws IOException {
-1552byte[] name = 
r.getRegionInfo().getRegionName();
-1553int stores = 0;
-1554int storefiles = 0;
-1555int storeUncompressedSizeMB = 0;
-1556int storefileSizeMB = 0;
-1557int memstoreSizeMB = (int) 
(r.getMemstoreSize() / 1024 / 1024);
-1558int storefileIndexSizeMB = 0;
-1559int rootIndexSizeKB = 0;
-1560int totalStaticIndexSizeKB = 0;
-1561int totalStaticBloomSizeKB = 0;
-1562long totalCompactingKVs = 0;
-1563long currentCompactedKVs = 0;
-1564List<Store> storeList = 
r.getStores();
-1565stores += storeList.size();
-1566for (Store store : storeList) {
-1567  storefiles += 
store.getStorefilesCount();
-1568  storeUncompressedSizeMB += (int) 
(store.getStoreSizeUncompressed() / 1024 / 1024);
-1569  storefileSizeMB += (int) 
(store.getStorefilesSize() / 1024 / 1024);
-1570  storefileIndexSizeMB += (int) 
(store.getStorefilesIndexSize() / 1024 / 1024);
-1571  CompactionProgress progress = 
store.getCompactionProgress();
-1572  if (progress != null) {
-1573totalCompactingKVs += 
progress.totalCompactingKVs;
-1574currentCompactedKVs += 
progress.currentCompactedKVs;
-1575  }
-1576  rootIndexSizeKB += (int) 
(store.getStorefilesIndexSize() / 1024);
-1577  totalStaticIndexSizeKB += (int) 
(store.getTotalStaticIndexSize() / 1024);
-1578  totalStaticBloomSizeKB += (int) 
(store.getTotalStaticBloomSize() / 1024);
-1579}
-1580
-1581float dataLocality =
-1582
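
The initializeMemStoreChunkPool() hunk above shows where the MSLAB chunk-pool parameters come from: ordinary Configuration keys exposed as constants on MemStoreLAB. A minimal sketch that reads the same settings, using only the calls visible in the diff:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.regionserver.MemStoreLAB;

// Minimal sketch mirroring the hunk above: read the MSLAB chunk-pool settings the
// same way the region server does. Only calls that appear in the diff are used.
class MslabSettingsSketch {
  static void printChunkPoolSettings(Configuration conf) {
    if (!MemStoreLAB.isEnabled(conf)) { // MSLAB off -> no chunk pool is created
      System.out.println("MSLAB disabled; skipping chunk pool initialization");
      return;
    }
    float initialCountPercentage = conf.getFloat(
        MemStoreLAB.CHUNK_POOL_INITIALSIZE_KEY, MemStoreLAB.POOL_INITIAL_SIZE_DEFAULT);
    int chunkSize = conf.getInt(MemStoreLAB.CHUNK_SIZE_KEY, MemStoreLAB.CHUNK_SIZE_DEFAULT);
    System.out.println("initial pool fill fraction: " + initialCountPercentage
        + ", chunk size: " + chunkSize + " bytes");
  }
}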

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e4348f53/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
index 31517f6..ac4a9b3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
@@ -64,1374 +64,1421 @@
 056import 
org.apache.hadoop.hbase.client.Scan.ReadType;
 057import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
 058import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-059import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-060import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-061import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-062import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-074import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-075import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-076import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-077import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-078import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
-098import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;
-099import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;
-100import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;
-101import 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-04-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e57d1b63/apidocs/org/apache/hadoop/hbase/classification/package-use.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/classification/package-use.html 
b/apidocs/org/apache/hadoop/hbase/classification/package-use.html
deleted file mode 100644
index 2982638..000
--- a/apidocs/org/apache/hadoop/hbase/classification/package-use.html
+++ /dev/null
@@ -1,125 +0,0 @@
-Uses of Package org.apache.hadoop.hbase.classification (Apache HBase 2.0.0-SNAPSHOT API)
-No usage of org.apache.hadoop.hbase.classification
-Copyright © 2007-2017 The Apache Software Foundation. All rights reserved.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e57d1b63/apidocs/org/apache/hadoop/hbase/classification/tools/package-frame.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/classification/tools/package-frame.html 
b/apidocs/org/apache/hadoop/hbase/classification/tools/package-frame.html
deleted file mode 100644
index 4e57c5c..000
--- a/apidocs/org/apache/hadoop/hbase/classification/tools/package-frame.html
+++ /dev/null
@@ -1,14 +0,0 @@
-org.apache.hadoop.hbase.classification.tools (Apache HBase 2.0.0-SNAPSHOT API)
-org.apache.hadoop.hbase.classification.tools

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e57d1b63/apidocs/org/apache/hadoop/hbase/classification/tools/package-summary.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/classification/tools/package-summary.html 
b/apidocs/org/apache/hadoop/hbase/classification/tools/package-summary.html
deleted file mode 100644
index a97fbab..000
--- a/apidocs/org/apache/hadoop/hbase/classification/tools/package-summary.html
+++ /dev/null
@@ -1,124 +0,0 @@
-org.apache.hadoop.hbase.classification.tools (Apache HBase 2.0.0-SNAPSHOT API)
-Package org.apache.hadoop.hbase.classification.tools
-Copyright © 2007-2017 The Apache Software Foundation. All rights reserved.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e57d1b63/apidocs/org/apache/hadoop/hbase/classification/tools/package-tree.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/classification/tools/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/classification/tools/package-tree.html
deleted file mode 100644