[12/51] [partial] hbase-site git commit: Published site at 0b28155d274910b4e667b949d51f78809a1eff0b.

2018-06-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e11cf2cb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html
index 83c17c0..9df0225 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.ByteStringUncompressor.html
@@ -54,323 +54,362 @@
 046import org.apache.hadoop.io.IOUtils;
 047
 048import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
-049
+049import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 050
-051/**
-052 * Compression in this class is lifted off Compressor/KeyValueCompression.
-053 * This is a pure coincidence... they are independent and don't have to be compatible.
-054 *
-055 * This codec is used at server side for writing cells to WAL as well as for sending edits
-056 * as part of the distributed splitting process.
-057 */
-058@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
-059  HBaseInterfaceAudience.PHOENIX, HBaseInterfaceAudience.CONFIG})
-060public class WALCellCodec implements Codec {
-061  /** Configuration key for the class to use when encoding cells in the WAL */
-062  public static final String WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
-063
-064  protected final CompressionContext compression;
-065  protected final ByteStringUncompressor statelessUncompressor = new ByteStringUncompressor() {
-066    @Override
-067    public byte[] uncompress(ByteString data, Dictionary dict) throws IOException {
-068      return WALCellCodec.uncompressByteString(data, dict);
-069    }
-070  };
-071
-072  /**
-073   * <b>All subclasses must implement a no argument constructor</b>
-074   */
-075  public WALCellCodec() {
-076    this.compression = null;
-077  }
-078
-079  /**
-080   * Default constructor - <b>all subclasses must implement a constructor with this signature </b>
-081   * if they are to be dynamically loaded from the {@link Configuration}.
-082   * @param conf configuration to configure <tt>this</tt>
-083   * @param compression compression the codec should support, can be <tt>null</tt> to indicate no
-084   *          compression
-085   */
-086  public WALCellCodec(Configuration conf, CompressionContext compression) {
-087    this.compression = compression;
-088  }
-089
-090  public static String getWALCellCodecClass(Configuration conf) {
-091    return conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
-092  }
-093
-094  /**
-095   * Create and setup a {@link WALCellCodec} from the {@code cellCodecClsName} and
-096   * CompressionContext, if {@code cellCodecClsName} is specified.
-097   * Otherwise Cell Codec classname is read from {@link Configuration}.
-098   * Fully prepares the codec for use.
-099   * @param conf {@link Configuration} to read for the user-specified codec. If none is specified,
-100   *          uses a {@link WALCellCodec}.
-101   * @param cellCodecClsName name of codec
-102   * @param compression compression the codec should use
-103   * @return a {@link WALCellCodec} ready for use.
-104   * @throws UnsupportedOperationException if the codec cannot be instantiated
-105   */
-106
-107  public static WALCellCodec create(Configuration conf, String cellCodecClsName,
-108      CompressionContext compression) throws UnsupportedOperationException {
-109    if (cellCodecClsName == null) {
-110      cellCodecClsName = getWALCellCodecClass(conf);
-111    }
-112    return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-113        { Configuration.class, CompressionContext.class }, new Object[] { conf, compression });
-114  }
-115
-116  /**
-117   * Create and setup a {@link WALCellCodec} from the
-118   * CompressionContext.
-119   * Cell Codec classname is read from {@link Configuration}.
-120   * Fully prepares the codec for use.
-121   * @param conf {@link Configuration} to read for the user-specified codec. If none is specified,
-122   *          uses a {@link WALCellCodec}.
-123   * @param compression compression the codec should use
-124   * @return a {@link WALCellCodec} ready for use.
-125   * @throws UnsupportedOperationException if the codec cannot be instantiated
-126   */
-127  public static WALCellCodec create(Configuration conf,
-128      CompressionContext compression) throws UnsupportedOperationException {
-129    String cellCodecClsName = getWALCellCodecClass(conf);
-130    return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
-131        { Configuration.class, CompressionContext.class }, new Object[] { conf, 
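
The removed source above spells out the codec-loading contract: the class named by WAL_CELL_CODEC_CLASS_KEY ("hbase.regionserver.wal.codec") is instantiated reflectively, so it must expose both a no-argument constructor and a (Configuration, CompressionContext) constructor. A minimal sketch of wiring a custom codec under that contract; the subclass name MyWALCellCodec is hypothetical, the key and constructors are taken from the diff:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.regionserver.wal.CompressionContext;
import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;

public class MyWALCellCodec extends WALCellCodec {
  // Required: no-argument constructor.
  public MyWALCellCodec() {
    super();
  }

  // Required: the (Configuration, CompressionContext) constructor that
  // WALCellCodec.create() invokes via ReflectionUtils.instantiateWithCustomCtor().
  public MyWALCellCodec(Configuration conf, CompressionContext compression) {
    super(conf, compression);
  }

  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, MyWALCellCodec.class.getName());
    // WALCellCodec.create(conf, null, null) would now fall back to the config
    // key and reflectively instantiate MyWALCellCodec.
  }
}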

[12/51] [partial] hbase-site git commit: Published site at 7d3750bd9fc9747623549c242cc4171e224b3eaf.

2018-06-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3469cbc0/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.html
index 5420d82..6ea3672 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/RegionStates.html
@@ -316,687 +316,728 @@
 308    }
 309  }
 310
-311  public enum ServerState { ONLINE, SPLITTING, OFFLINE }
-312  public static class ServerStateNode implements Comparable<ServerStateNode> {
-313    private final ServerReportEvent reportEvent;
-314
-315    private final Set<RegionStateNode> regions;
-316    private final ServerName serverName;
-317
-318    private volatile ServerState state = ServerState.ONLINE;
-319    private volatile int versionNumber = 0;
-320
-321    public ServerStateNode(final ServerName serverName) {
-322      this.serverName = serverName;
-323      this.regions = ConcurrentHashMap.newKeySet();
-324      this.reportEvent = new ServerReportEvent(serverName);
-325    }
-326
-327    public ServerName getServerName() {
-328      return serverName;
-329    }
+311  /**
+312   * Server State.
+313   */
+314  public enum ServerState {
+315    /**
+316     * Initial state. Available.
+317     */
+318    ONLINE,
+319
+320    /**
+321     * Server expired/crashed. Currently undergoing WAL splitting.
+322     */
+323    SPLITTING,
+324
+325    /**
+326     * WAL splitting done.
+327     */
+328    OFFLINE
+329  }
 330
-331    public ServerState getState() {
-332      return state;
-333    }
-334
-335    public int getVersionNumber() {
-336      return versionNumber;
-337    }
-338
-339    public ProcedureEvent<?> getReportEvent() {
-340      return reportEvent;
-341    }
+331  /**
+332   * State of Server; list of hosted regions, etc.
+333   */
+334  public static class ServerStateNode implements Comparable<ServerStateNode> {
+335    private final ServerReportEvent reportEvent;
+336
+337    private final Set<RegionStateNode> regions;
+338    private final ServerName serverName;
+339
+340    private volatile ServerState state = ServerState.ONLINE;
+341    private volatile int versionNumber = 0;
 342
-343    public boolean isInState(final ServerState... expected) {
-344      boolean expectedState = false;
-345      if (expected != null) {
-346        for (int i = 0; i < expected.length; ++i) {
-347          expectedState |= (state == expected[i]);
-348        }
-349      }
-350      return expectedState;
+343    public ServerStateNode(final ServerName serverName) {
+344      this.serverName = serverName;
+345      this.regions = ConcurrentHashMap.newKeySet();
+346      this.reportEvent = new ServerReportEvent(serverName);
+347    }
+348
+349    public ServerName getServerName() {
+350      return serverName;
 351    }
 352
-353    public void setState(final ServerState state) {
-354      this.state = state;
+353    public ServerState getState() {
+354      return state;
 355    }
 356
-357    public void setVersionNumber(final int versionNumber) {
-358      this.versionNumber = versionNumber;
+357    public int getVersionNumber() {
+358      return versionNumber;
 359    }
 360
-361    public Set<RegionStateNode> getRegions() {
-362      return regions;
+361    public ProcedureEvent<?> getReportEvent() {
+362      return reportEvent;
 363    }
 364
-365    public int getRegionCount() {
-366      return regions.size();
+365    public boolean isOffline() {
+366      return this.state.equals(ServerState.OFFLINE);
 367    }
 368
-369    public ArrayList<RegionInfo> getRegionInfoList() {
-370      ArrayList<RegionInfo> hris = new ArrayList<RegionInfo>(regions.size());
-371      for (RegionStateNode region: regions) {
-372        hris.add(region.getRegionInfo());
-373      }
-374      return hris;
-375    }
-376
-377    public void addRegion(final RegionStateNode regionNode) {
-378      this.regions.add(regionNode);
-379    }
-380
-381    public void removeRegion(final RegionStateNode regionNode) {
-382      this.regions.remove(regionNode);
-383    }
-384
-385    @Override
-386    public int compareTo(final ServerStateNode other) {
-387      return getServerName().compareTo(other.getServerName());
-388    }
-389
-390    @Override
-391    public int hashCode() {
-392      return getServerName().hashCode();
+369    public boolean isInState(final ServerState... expected) {
+370      boolean expectedState = false;
+371      if (expected != null) {
+372        for (int i = 0; i < expected.length; ++i) {
+373          expectedState |= (state == expected[i]);
+374        }
+375      }
+376      return expectedState;
+377    }
+378
+379    public void setState(final ServerState state) {
+380      
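
The isInState() helper above ORs the current state against a varargs list of acceptable states. A self-contained sketch of the same check, using only the enum and logic shown in the diff (the ServerStateCheck wrapper class is illustrative):

public class ServerStateCheck {
  enum ServerState { ONLINE, SPLITTING, OFFLINE }

  static boolean isInState(ServerState state, ServerState... expected) {
    boolean expectedState = false;
    if (expected != null) {
      for (int i = 0; i < expected.length; ++i) {
        expectedState |= (state == expected[i]);
      }
    }
    return expectedState;
  }

  public static void main(String[] args) {
    // A server undergoing WAL splitting matches either of these two states:
    System.out.println(isInState(ServerState.SPLITTING, ServerState.SPLITTING, ServerState.OFFLINE)); // true
    System.out.println(isInState(ServerState.SPLITTING, ServerState.ONLINE)); // false
  }
}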

[12/51] [partial] hbase-site git commit: Published site at 997747076d8ec0b4346d7cb99c4b0667a7c14905.

2018-05-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4df09ed9/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.html b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.html
index 168462e..67da347 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.html
@@ -213,330 +213,337 @@
 205        if (master.getMasterCoprocessorHost() != null) {
 206          master.getMasterCoprocessorHost().preMoveServers(hostPorts, request.getTargetGroup());
 207        }
-208        groupAdminServer.moveServers(hostPorts, request.getTargetGroup());
-209        if (master.getMasterCoprocessorHost() != null) {
-210          master.getMasterCoprocessorHost().postMoveServers(hostPorts, request.getTargetGroup());
-211        }
-212      } catch (IOException e) {
-213        CoprocessorRpcUtils.setControllerException(controller, e);
-214      }
-215      done.run(builder.build());
-216    }
-217
-218    @Override
-219    public void moveTables(RpcController controller, MoveTablesRequest request,
-220        RpcCallback<MoveTablesResponse> done) {
-221      MoveTablesResponse.Builder builder = MoveTablesResponse.newBuilder();
-222      Set<TableName> tables = new HashSet<>(request.getTableNameList().size());
-223      for (HBaseProtos.TableName tableName : request.getTableNameList()) {
-224        tables.add(ProtobufUtil.toTableName(tableName));
-225      }
-226      LOG.info(master.getClientIdAuditPrefix() + " move tables " + tables + " to rsgroup "
-227          + request.getTargetGroup());
-228      try {
-229        if (master.getMasterCoprocessorHost() != null) {
-230          master.getMasterCoprocessorHost().preMoveTables(tables, request.getTargetGroup());
-231        }
-232        groupAdminServer.moveTables(tables, request.getTargetGroup());
-233        if (master.getMasterCoprocessorHost() != null) {
-234          master.getMasterCoprocessorHost().postMoveTables(tables, request.getTargetGroup());
-235        }
-236      } catch (IOException e) {
-237        CoprocessorRpcUtils.setControllerException(controller, e);
-238      }
-239      done.run(builder.build());
-240    }
-241
-242    @Override
-243    public void addRSGroup(RpcController controller, AddRSGroupRequest request,
-244        RpcCallback<AddRSGroupResponse> done) {
-245      AddRSGroupResponse.Builder builder = AddRSGroupResponse.newBuilder();
-246      LOG.info(master.getClientIdAuditPrefix() + " add rsgroup " + request.getRSGroupName());
-247      try {
-248        if (master.getMasterCoprocessorHost() != null) {
-249          master.getMasterCoprocessorHost().preAddRSGroup(request.getRSGroupName());
-250        }
-251        groupAdminServer.addRSGroup(request.getRSGroupName());
-252        if (master.getMasterCoprocessorHost() != null) {
-253          master.getMasterCoprocessorHost().postAddRSGroup(request.getRSGroupName());
-254        }
-255      } catch (IOException e) {
-256        CoprocessorRpcUtils.setControllerException(controller, e);
-257      }
-258      done.run(builder.build());
-259    }
-260
-261    @Override
-262    public void removeRSGroup(RpcController controller,
-263        RemoveRSGroupRequest request, RpcCallback<RemoveRSGroupResponse> done) {
-264      RemoveRSGroupResponse.Builder builder =
-265          RemoveRSGroupResponse.newBuilder();
-266      LOG.info(master.getClientIdAuditPrefix() + " remove rsgroup " + request.getRSGroupName());
-267      try {
-268        if (master.getMasterCoprocessorHost() != null) {
-269          master.getMasterCoprocessorHost().preRemoveRSGroup(request.getRSGroupName());
-270        }
-271        groupAdminServer.removeRSGroup(request.getRSGroupName());
-272        if (master.getMasterCoprocessorHost() != null) {
-273          master.getMasterCoprocessorHost().postRemoveRSGroup(request.getRSGroupName());
-274        }
-275      } catch (IOException e) {
-276        CoprocessorRpcUtils.setControllerException(controller, e);
-277      }
-278      done.run(builder.build());
-279    }
-280
-281    @Override
-282    public void balanceRSGroup(RpcController controller,
-283        BalanceRSGroupRequest request, RpcCallback<BalanceRSGroupResponse> done) {
-284      BalanceRSGroupResponse.Builder builder = BalanceRSGroupResponse.newBuilder();
-285      LOG.info(master.getClientIdAuditPrefix() + " balance rsgroup, group=" +
-286          request.getRSGroupName());
-287      try {
-288        if (master.getMasterCoprocessorHost() != null) {
-289          master.getMasterCoprocessorHost().preBalanceRSGroup(request.getRSGroupName());
-290        }
-291        boolean balancerRan = groupAdminServer.balanceRSGroup(request.getRSGroupName());
[12/51] [partial] hbase-site git commit: Published site at f3d1c021de2264301f68eadb9ef126ff83d7ef53.

2018-05-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/883dde2f/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
index 54b1f96..ed95cbf 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
@@ -31,922 +31,906 @@
 023import java.io.ByteArrayInputStream;
 024import java.io.IOException;
 025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.Collections;
-028import java.util.HashMap;
-029import java.util.HashSet;
-030import java.util.LinkedList;
-031import java.util.List;
-032import java.util.Map;
-033import java.util.NavigableSet;
-034import java.util.Set;
-035import java.util.SortedSet;
-036import java.util.TreeSet;
-037import java.util.concurrent.atomic.AtomicBoolean;
-038
-039import org.apache.hadoop.conf.Configuration;
-040import org.apache.hadoop.hbase.Cell;
-041import org.apache.hadoop.hbase.CellUtil;
-042import org.apache.hadoop.hbase.Coprocessor;
-043import org.apache.hadoop.hbase.DoNotRetryIOException;
-044import org.apache.hadoop.hbase.HColumnDescriptor;
-045import org.apache.hadoop.hbase.HConstants;
-046import org.apache.hadoop.hbase.HTableDescriptor;
-047import org.apache.hadoop.hbase.MetaTableAccessor;
-048import org.apache.hadoop.hbase.MetaTableAccessor.DefaultVisitorBase;
-049import org.apache.hadoop.hbase.ServerName;
-050import org.apache.hadoop.hbase.TableName;
-051import org.apache.hadoop.hbase.client.ClusterConnection;
-052import org.apache.hadoop.hbase.client.Delete;
-053import org.apache.hadoop.hbase.client.Get;
-054import org.apache.hadoop.hbase.client.Mutation;
-055import org.apache.hadoop.hbase.client.Put;
-056import org.apache.hadoop.hbase.client.RegionInfo;
-057import org.apache.hadoop.hbase.client.Result;
-058import org.apache.hadoop.hbase.client.Scan;
-059import org.apache.hadoop.hbase.client.Table;
-060import org.apache.hadoop.hbase.client.TableState;
-061import org.apache.hadoop.hbase.constraint.ConstraintException;
-062import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
-063import org.apache.hadoop.hbase.exceptions.DeserializationException;
-064import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-065import org.apache.hadoop.hbase.master.MasterServices;
-066import org.apache.hadoop.hbase.master.ServerListener;
-067import org.apache.hadoop.hbase.master.TableStateManager;
-068import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
-069import org.apache.hadoop.hbase.net.Address;
-070import org.apache.hadoop.hbase.procedure2.Procedure;
-071import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
-072import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-073import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
-074import org.apache.hadoop.hbase.protobuf.generated.RSGroupProtos;
-075import org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-076import org.apache.hadoop.hbase.quotas.QuotaUtil;
-077import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
-078import org.apache.hadoop.hbase.security.access.AccessControlLists;
-079import org.apache.hadoop.hbase.util.Bytes;
-080import org.apache.hadoop.hbase.zookeeper.ZKUtil;
-081import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-082import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-083import org.apache.yetus.audience.InterfaceAudience;
-084import org.apache.zookeeper.KeeperException;
-085import org.slf4j.Logger;
-086import org.slf4j.LoggerFactory;
-087
-088import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-089import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
-090import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
-091import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-092import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-093
-094/**
-095 * This is an implementation of {@link RSGroupInfoManager} which makes
-096 * use of an HBase table as the persistence store for the group information.
-097 * It also makes use of zookeeper to store group information needed
-098 * for bootstrapping during offline mode.
-099 *
-100 * <h2>Concurrency</h2>
-101 * RSGroup state is kept locally in Maps. There is a rsgroup name to cached
-102 * RSGroupInfo Map at {@link #rsGroupMap} and a Map of tables to the name of the
-103 * rsgroup they belong too (in {@link #tableMap}). These Maps are persisted to the
-104 * hbase:rsgroup table (and cached in zk) on each modification.
-105 *
-106 * <p>Mutations on state are 
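
The truncated javadoc above describes the persistence design: in-memory maps (rsGroupMap, tableMap) serve reads, and every mutation is written through to the hbase:rsgroup table and cached in ZooKeeper. A toy sketch of that write-through pattern under those stated assumptions; GroupStateCache and its members are illustrative, not the real implementation:

import java.util.HashMap;
import java.util.Map;

class GroupStateCache {
  // table name -> rsgroup name, analogous to the tableMap described above
  private final Map<String, String> tableMap = new HashMap<>();

  synchronized void moveTable(String table, String group) {
    tableMap.put(table, group);
    persist(); // in RSGroupInfoManagerImpl: write hbase:rsgroup, then mirror to zk
  }

  private void persist() {
    // placeholder for the table write + ZooKeeper update the javadoc describes
  }
}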

[12/51] [partial] hbase-site git commit: Published site at cf529f18a9959589fa635f78df4840472526ea2c.

2018-05-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7bcc960d/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
index 3f8844b..cdb9398 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.AsyncRandomWriteTest.html
@@ -140,2712 +140,2713 @@
 132public class PerformanceEvaluation extends Configured implements Tool {
 133  static final String RANDOM_SEEK_SCAN = "randomSeekScan";
 134  static final String RANDOM_READ = "randomRead";
-135  private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
-136  private static final ObjectMapper MAPPER = new ObjectMapper();
-137  static {
-138    MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
-139  }
-140
-141  public static final String TABLE_NAME = "TestTable";
-142  public static final String FAMILY_NAME_BASE = "info";
-143  public static final byte[] FAMILY_ZERO = Bytes.toBytes("info0");
-144  public static final byte[] COLUMN_ZERO = Bytes.toBytes("" + 0);
-145  public static final int DEFAULT_VALUE_LENGTH = 1000;
-146  public static final int ROW_LENGTH = 26;
-147
-148  private static final int ONE_GB = 1024 * 1024 * 1000;
-149  private static final int DEFAULT_ROWS_PER_GB = ONE_GB / DEFAULT_VALUE_LENGTH;
-150  // TODO : should we make this configurable
-151  private static final int TAG_LENGTH = 256;
-152  private static final DecimalFormat FMT = new DecimalFormat("0.##");
-153  private static final MathContext CXT = MathContext.DECIMAL64;
-154  private static final BigDecimal MS_PER_SEC = BigDecimal.valueOf(1000);
-155  private static final BigDecimal BYTES_PER_MB = BigDecimal.valueOf(1024 * 1024);
-156  private static final TestOptions DEFAULT_OPTS = new TestOptions();
-157
-158  private static Map<String, CmdDescriptor> COMMANDS = new TreeMap<>();
-159  private static final Path PERF_EVAL_DIR = new Path("performance_evaluation");
-160
-161  static {
-162    addCommandDescriptor(AsyncRandomReadTest.class, "asyncRandomRead",
-163        "Run async random read test");
-164    addCommandDescriptor(AsyncRandomWriteTest.class, "asyncRandomWrite",
-165        "Run async random write test");
-166    addCommandDescriptor(AsyncSequentialReadTest.class, "asyncSequentialRead",
-167        "Run async sequential read test");
-168    addCommandDescriptor(AsyncSequentialWriteTest.class, "asyncSequentialWrite",
-169        "Run async sequential write test");
-170    addCommandDescriptor(AsyncScanTest.class, "asyncScan",
-171        "Run async scan test (read every row)");
-172    addCommandDescriptor(RandomReadTest.class, RANDOM_READ,
-173      "Run random read test");
-174    addCommandDescriptor(RandomSeekScanTest.class, RANDOM_SEEK_SCAN,
-175      "Run random seek and scan 100 test");
-176    addCommandDescriptor(RandomScanWithRange10Test.class, "scanRange10",
-177      "Run random seek scan with both start and stop row (max 10 rows)");
-178    addCommandDescriptor(RandomScanWithRange100Test.class, "scanRange100",
-179      "Run random seek scan with both start and stop row (max 100 rows)");
-180    addCommandDescriptor(RandomScanWithRange1000Test.class, "scanRange1000",
-181      "Run random seek scan with both start and stop row (max 1000 rows)");
-182    addCommandDescriptor(RandomScanWithRange1Test.class, "scanRange1",
-183      "Run random seek scan with both start and stop row (max 1 rows)");
-184    addCommandDescriptor(RandomWriteTest.class, "randomWrite",
-185      "Run random write test");
-186    addCommandDescriptor(SequentialReadTest.class, "sequentialRead",
-187      "Run sequential read test");
-188    addCommandDescriptor(SequentialWriteTest.class, "sequentialWrite",
-189      "Run sequential write test");
-190    addCommandDescriptor(ScanTest.class, "scan",
-191      "Run scan test (read every row)");
-192    addCommandDescriptor(FilteredScanTest.class, "filterScan",
-193      "Run scan test using a filter to find a specific row based on it's value " +
-194      "(make sure to use --rows=20)");
-195    addCommandDescriptor(IncrementTest.class, "increment",
-196      "Increment on each row; clients overlap on keyspace so some concurrent operations");
-197    addCommandDescriptor(AppendTest.class, "append",
-198      "Append on each row; clients overlap on keyspace so some concurrent operations");
-199    addCommandDescriptor(CheckAndMutateTest.class, "checkAndMutate",
-200      "CheckAndMutate on each row; clients overlap on keyspace so some concurrent operations");
-201
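
The static initializer above registers every test by name in a sorted map of command descriptors. A compact sketch of that registry pattern; CommandRegistry and this CmdDescriptor are simplified stand-ins for the real inner classes:

import java.util.Map;
import java.util.TreeMap;

class CommandRegistry {
  static final class CmdDescriptor {
    final Class<?> cmdClass;
    final String name;
    final String description;
    CmdDescriptor(Class<?> cmdClass, String name, String description) {
      this.cmdClass = cmdClass;
      this.name = name;
      this.description = description;
    }
  }

  // TreeMap keeps commands sorted by name for help output.
  private static final Map<String, CmdDescriptor> COMMANDS = new TreeMap<>();

  static void addCommandDescriptor(Class<?> cmdClass, String name, String description) {
    COMMANDS.put(name, new CmdDescriptor(cmdClass, name, description));
  }

  static {
    addCommandDescriptor(Object.class, "randomWrite", "Run random write test");
    addCommandDescriptor(Object.class, "sequentialRead", "Run sequential read test");
  }
}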

[12/51] [partial] hbase-site git commit: Published site at 021f66d11d2cbb7308308093e29e69d6e7661ee9.

2018-05-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/92a26cfb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
index cca21a9..2f8a48b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
@@ -2280,7 +2280,7 @@
 2272      this.cacheFlushCount = snapshot.getCellsCount();
 2273      this.cacheFlushSize = snapshot.getDataSize();
 2274      committedFiles = new ArrayList<>(1);
-2275      return new MemStoreSize(snapshot.getMemStoreSize());
+2275      return snapshot.getMemStoreSize();
 2276    }
 2277
 2278    @Override

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/92a26cfb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
index cca21a9..2f8a48b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -2280,7 +2280,7 @@
 2272      this.cacheFlushCount = snapshot.getCellsCount();
 2273      this.cacheFlushSize = snapshot.getDataSize();
 2274      committedFiles = new ArrayList<>(1);
-2275      return new MemStoreSize(snapshot.getMemStoreSize());
+2275      return snapshot.getMemStoreSize();
 2276    }
 2277
 2278    @Override

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/92a26cfb/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStore.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStore.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStore.html
index 541a093..651511e 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStore.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStore.html
@@ -58,96 +58,95 @@
 050  void clearSnapshot(long id) throws UnexpectedStateException;
 051
 052  /**
-053   * On flush, how much memory we will clear.
-054   * Flush will first clear out the data in snapshot if any (It will take a second flush
-055   * invocation to clear the current Cell set). If snapshot is empty, current
-056   * Cell set will be flushed.
-057   *
-058   * @return size of data that is going to be flushed
-059   */
-060  MemStoreSize getFlushableSize();
-061
-062  /**
-063   * Return the size of the snapshot(s) if any
-064   * @return size of the memstore snapshot
-065   */
-066  MemStoreSize getSnapshotSize();
-067
-068  /**
-069   * Write an update
-070   * @param cell
-071   * @param memstoreSizing The delta in memstore size will be passed back via this.
-072   *        This will include both data size and heap overhead delta.
-073   */
-074  void add(final Cell cell, MemStoreSizing memstoreSizing);
-075
-076  /**
-077   * Write the updates
-078   * @param cells
-079   * @param memstoreSizing The delta in memstore size will be passed back via this.
-080   *        This will include both data size and heap overhead delta.
-081   */
-082  void add(Iterable<Cell> cells, MemStoreSizing memstoreSizing);
-083
-084  /**
-085   * @return Oldest timestamp of all the Cells in the MemStore
-086   */
-087  long timeOfOldestEdit();
-088
-089  /**
-090   * Update or insert the specified cells.
-091   * <p>
-092   * For each Cell, insert into MemStore. This will atomically upsert the value for that
-093   * row/family/qualifier. If a Cell did already exist, it will then be removed.
-094   * <p>
-095   * Currently the memstoreTS is kept at 0 so as each insert happens, it will be immediately
-096   * visible. May want to change this so it is atomic across all KeyValues.
-097   * <p>
-098   * This is called under row lock, so Get operations will still see updates atomically. Scans will
-099   * only see each KeyValue update as atomic.
-100   * @param cells
-101   * @param readpoint readpoint below which we can safely remove duplicate Cells.
-102   * @param memstoreSizing The delta in memstore size will be passed back via this.
-103   *        This will include both data size and heap overhead delta.
-104   */
-105  void upsert(Iterable<Cell> cells, long readpoint, MemStoreSizing memstoreSizing);
-106
-107  /**
-108   * @return scanner over the memstore. This might include scanner over the snapshot when one is
-109   * present.
-110   */
-111  List<KeyValueScanner> getScanners(long readPt) throws IOException;
-112
-113  /**
-114   * @return 
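
The getFlushableSize() javadoc above defines a two-phase flush: a flush drains the snapshot if one exists, and only snapshots-then-drains the active cell set when the snapshot is empty. An illustrative sketch of those semantics (TwoPhaseFlush is a toy model, not the MemStore implementation; strings stand in for Cells):

import java.util.ArrayList;
import java.util.List;

class TwoPhaseFlush {
  private List<String> active = new ArrayList<>();
  private List<String> snapshot = new ArrayList<>();

  int getFlushableSize() {
    // "Flush will first clear out the data in snapshot if any ...
    //  If snapshot is empty, current Cell set will be flushed."
    return snapshot.isEmpty() ? active.size() : snapshot.size();
  }

  List<String> flush() {
    if (snapshot.isEmpty()) {
      snapshot = active;          // take a snapshot of the active set
      active = new ArrayList<>();
    }
    List<String> flushed = snapshot; // drain the snapshot
    snapshot = new ArrayList<>();
    return flushed;
  }
}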

[12/51] [partial] hbase-site git commit: Published site at acd0d1e446c164d9c54bfb461b2d449c8d717c07.

2018-05-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f2065178/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange1000Test.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange1000Test.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange1000Test.html
index 2510283..418c60c 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange1000Test.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/PerformanceEvaluation.RandomScanWithRange1000Test.html
@@ -77,77 +77,77 @@
 069import org.apache.hadoop.hbase.client.RowMutations;
 070import org.apache.hadoop.hbase.client.Scan;
 071import org.apache.hadoop.hbase.client.Table;
-072import org.apache.hadoop.hbase.filter.BinaryComparator;
-073import org.apache.hadoop.hbase.filter.Filter;
-074import org.apache.hadoop.hbase.filter.FilterAllFilter;
-075import org.apache.hadoop.hbase.filter.FilterList;
-076import org.apache.hadoop.hbase.filter.PageFilter;
-077import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-078import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-079import org.apache.hadoop.hbase.io.compress.Compression;
-080import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-081import org.apache.hadoop.hbase.io.hfile.RandomDistribution;
-082import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-083import org.apache.hadoop.hbase.regionserver.BloomType;
-084import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
-085import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
-086import org.apache.hadoop.hbase.trace.SpanReceiverHost;
-087import org.apache.hadoop.hbase.trace.TraceUtil;
-088import org.apache.hadoop.hbase.util.ByteArrayHashKey;
-089import org.apache.hadoop.hbase.util.Bytes;
-090import org.apache.hadoop.hbase.util.Hash;
-091import org.apache.hadoop.hbase.util.MurmurHash;
-092import org.apache.hadoop.hbase.util.Pair;
-093import org.apache.hadoop.hbase.util.YammerHistogramUtils;
-094import org.apache.hadoop.io.LongWritable;
-095import org.apache.hadoop.io.Text;
-096import org.apache.hadoop.mapreduce.Job;
-097import org.apache.hadoop.mapreduce.Mapper;
-098import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
-099import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-100import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
-101import org.apache.hadoop.util.Tool;
-102import org.apache.hadoop.util.ToolRunner;
-103import org.apache.htrace.core.ProbabilitySampler;
-104import org.apache.htrace.core.Sampler;
-105import org.apache.htrace.core.TraceScope;
-106import org.apache.yetus.audience.InterfaceAudience;
-107import org.slf4j.Logger;
-108import org.slf4j.LoggerFactory;
-109import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
-110import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-111
-112/**
-113 * Script used evaluating HBase performance and scalability.  Runs a HBase
-114 * client that steps through one of a set of hardcoded tests or 'experiments'
-115 * (e.g. a random reads test, a random writes test, etc.). Pass on the
-116 * command-line which test to run and how many clients are participating in
-117 * this experiment. Run {@code PerformanceEvaluation --help} to obtain usage.
-118 *
-119 * <p>This class sets up and runs the evaluation programs described in
-120 * Section 7, <i>Performance Evaluation</i>, of the <a
-121 * href="http://labs.google.com/papers/bigtable.html">Bigtable</a>
-122 * paper, pages 8-10.
-123 *
-124 * <p>By default, runs as a mapreduce job where each mapper runs a single test
-125 * client. Can also run as a non-mapreduce, multithreaded application by
-126 * specifying {@code --nomapred}. Each client does about 1GB of data, unless
-127 * specified otherwise.
-128 */
-129@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-130public class PerformanceEvaluation extends Configured implements Tool {
-131  static final String RANDOM_SEEK_SCAN = "randomSeekScan";
-132  static final String RANDOM_READ = "randomRead";
-133  private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
-134  private static final ObjectMapper MAPPER = new ObjectMapper();
-135  static {
-136    MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
-137  }
-138
-139  public static final String TABLE_NAME = "TestTable";
-140  public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
-141  public static final byte [] COLUMN_ZERO = Bytes.toBytes("" + 0);
-142  public static final byte [] QUALIFIER_NAME = COLUMN_ZERO;
+072import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+073import org.apache.hadoop.hbase.filter.BinaryComparator;
+074import 
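
Per the class javadoc above, PerformanceEvaluation is a Tool whose arguments select MapReduce vs. multithreaded mode (--nomapred) and a registered command name. A hedged sketch of driving it programmatically via ToolRunner; the flag and command come from the javadoc and command table, the client count is arbitrary, and the (Configuration) constructor is assumed from the Configured base class:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.PerformanceEvaluation;
import org.apache.hadoop.util.ToolRunner;

public class PeDriver {
  public static void main(String[] args) throws Exception {
    // Run the randomWrite test with 10 multithreaded clients (no MapReduce).
    int ret = ToolRunner.run(HBaseConfiguration.create(),
        new PerformanceEvaluation(HBaseConfiguration.create()),
        new String[] { "--nomapred", "randomWrite", "10" });
    System.exit(ret);
  }
}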

[12/51] [partial] hbase-site git commit: Published site at 87f5b5f3411d96c31b4cb61b9a57ced22be91d1f.

2018-05-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 3168ee3..e159b3f 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -145,8 +145,8 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithFailures.FailurePhase
 org.apache.hadoop.hbase.backup.TestBackupDeleteWithFailures.Failure
+org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithFailures.FailurePhase
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
index fc45e57..d865969 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
@@ -5307,18 +5307,22 @@
 
 
 static HBaseClassTestRule
-TestWALEntrySinkFilter.CLASS_RULE
+TestSerialReplicationEndpoint.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestRegionReplicaReplicationEndpoint.CLASS_RULE
+TestWALEntrySinkFilter.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestReplicator.CLASS_RULE
+TestRegionReplicaReplicationEndpoint.CLASS_RULE
 
 
 static HBaseClassTestRule
+TestReplicator.CLASS_RULE
+
+
+static HBaseClassTestRule
 TestRegionReplicaReplicationEndpointNoMaster.CLASS_RULE
 
 
@@ -6225,42 +6229,46 @@
 
 
 static HBaseClassTestRule
-TestCoprocessorScanPolicy.CLASS_RULE
+TestHBaseFsckCleanReplicationBarriers.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestMiniClusterLoadSequential.CLASS_RULE
+TestCoprocessorScanPolicy.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestFromClientSide3WoUnsafe.CLASS_RULE
+TestMiniClusterLoadSequential.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestEncryptionTest.CLASS_RULE
+TestFromClientSide3WoUnsafe.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestCompressionTest.CLASS_RULE
+TestEncryptionTest.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestIdReadWriteLock.CLASS_RULE
+TestCompressionTest.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestBoundedPriorityBlockingQueue.CLASS_RULE
+TestIdReadWriteLock.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestMiniClusterLoadParallel.CLASS_RULE
+TestBoundedPriorityBlockingQueue.CLASS_RULE
 
 
 static HBaseClassTestRule
-TestRegionSplitCalculator.CLASS_RULE
+TestMiniClusterLoadParallel.CLASS_RULE
 
 
 static HBaseClassTestRule
+TestRegionSplitCalculator.CLASS_RULE
+
+
+static HBaseClassTestRule
 TestIncrementingEnvironmentEdge.CLASS_RULE
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
index 48f4ec2..bcd3437 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseTestingUtility.html
@@ -3358,14 +3358,18 @@
 TestSerialReplicationChecker.UTIL
 
 
+private static HBaseTestingUtility
+TestSerialReplicationEndpoint.UTIL
+
+
 protected static HBaseTestingUtility
 TestReplicationSourceManager.utility
 
-
+
 private static HBaseTestingUtility
 TestGlobalThrottler.utility1
 
-
+
 private static HBaseTestingUtility
 TestGlobalThrottler.utility2
 
@@ -4307,6 +4311,10 @@
 private static HBaseTestingUtility
 TestConnectionCache.UTIL
 
+
+private static HBaseTestingUtility
+TestHBaseFsckCleanReplicationBarriers.UTIL
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/de18d468/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.TestUpdatableReplicationEndpoint.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.TestUpdatableReplicationEndpoint.html 

[12/51] [partial] hbase-site git commit: Published site at 2912c953551bedbfbf30c32c156ed7bb187d54c3.

2018-04-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d220bc5e/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
index 8302e28..c370eb9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
@@ -2113,3031 +2113,3033 @@
 2105          errors.reportError(ERROR_CODE.ORPHAN_TABLE_STATE,
 2106              tableName + " unable to delete dangling table state " + tableState);
 2107        }
-2108        } else {
-2109          errors.reportError(ERROR_CODE.ORPHAN_TABLE_STATE,
-2110              tableName + " has dangling table state " + tableState);
-2111        }
-2112      }
-2113    }
-2114    // check that all tables have states
-2115    for (TableName tableName : tablesInfo.keySet()) {
-2116      if (isTableIncluded(tableName) && !tableStates.containsKey(tableName)) {
-2117        if (fixMeta) {
-2118          MetaTableAccessor.updateTableState(connection, tableName, TableState.State.ENABLED);
-2119          TableState newState = MetaTableAccessor.getTableState(connection, tableName);
-2120          if (newState == null) {
-2121            errors.reportError(ERROR_CODE.NO_TABLE_STATE,
-2122                "Unable to change state for table " + tableName + " in meta ");
-2123          }
-2124        } else {
-2125          errors.reportError(ERROR_CODE.NO_TABLE_STATE,
-2126              tableName + " has no state in meta ");
-2127        }
-2128      }
-2129    }
-2130  }
-2131
-2132  private void preCheckPermission() throws IOException, AccessDeniedException {
-2133    if (shouldIgnorePreCheckPermission()) {
-2134      return;
-2135    }
-2136
-2137    Path hbaseDir = FSUtils.getRootDir(getConf());
-2138    FileSystem fs = hbaseDir.getFileSystem(getConf());
-2139    UserProvider userProvider = UserProvider.instantiate(getConf());
-2140    UserGroupInformation ugi = userProvider.getCurrent().getUGI();
-2141    FileStatus[] files = fs.listStatus(hbaseDir);
-2142    for (FileStatus file : files) {
-2143      try {
-2144        FSUtils.checkAccess(ugi, file, FsAction.WRITE);
-2145      } catch (AccessDeniedException ace) {
-2146        LOG.warn("Got AccessDeniedException when preCheckPermission ", ace);
-2147        errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + ugi.getUserName()
-2148          + " does not have write perms to " + file.getPath()
-2149          + ". Please rerun hbck as hdfs user " + file.getOwner());
-2150        throw ace;
-2151      }
-2152    }
-2153  }
-2154
-2155  /**
-2156   * Deletes region from meta table
-2157   */
-2158  private void deleteMetaRegion(HbckInfo hi) throws IOException {
-2159    deleteMetaRegion(hi.metaEntry.getRegionName());
-2160  }
-2161
-2162  /**
-2163   * Deletes region from meta table
-2164   */
-2165  private void deleteMetaRegion(byte[] metaKey) throws IOException {
-2166    Delete d = new Delete(metaKey);
-2167    meta.delete(d);
-2168    LOG.info("Deleted " + Bytes.toString(metaKey) + " from META" );
-2169  }
-2170
-2171  /**
-2172   * Reset the split parent region info in meta table
-2173   */
-2174  private void resetSplitParent(HbckInfo hi) throws IOException {
-2175    RowMutations mutations = new RowMutations(hi.metaEntry.getRegionName());
-2176    Delete d = new Delete(hi.metaEntry.getRegionName());
-2177    d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER);
-2178    d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER);
-2179    mutations.add(d);
-2180
-2181    RegionInfo hri = RegionInfoBuilder.newBuilder(hi.metaEntry)
-2182        .setOffline(false)
-2183        .setSplit(false)
-2184        .build();
-2185    Put p = MetaTableAccessor.makePutFromRegionInfo(hri, EnvironmentEdgeManager.currentTime());
-2186    mutations.add(p);
-2187
-2188    meta.mutateRow(mutations);
-2189    LOG.info("Reset split parent " + hi.metaEntry.getRegionNameAsString() + " in META" );
-2190  }
-2191
-2192  /**
-2193   * This backwards-compatibility wrapper for permanently offlining a region
-2194   * that should not be alive.  If the region server does not support the
-2195   * "offline" method, it will use the closest unassign method instead.  This
-2196   * will basically work until one attempts to disable or delete the affected
-2197   * table.  The problem has to do with in-memory only master state, so
-2198   * restarting the HMaster or failing over to another should fix this.
-2199   */
-2200  private void offline(byte[] regionName) throws IOException {
-2201    String regionString = Bytes.toStringBinary(regionName);
-2202    if (!rsSupportsOffline) {
-2203      LOG.warn("Using unassign region " + 
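
resetSplitParent() above repairs a stuck split parent with a single atomic mutation batch: one RowMutations carrying a Delete of the SPLITA/SPLITB qualifiers plus a Put that rewrites the region info with offline=false and split=false. A sketch of that pattern using only the calls visible in the diff; table acquisition and the rewritten Put are left to the caller:

import java.io.IOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;

class ResetSplitParentSketch {
  static void resetSplitParent(Table meta, byte[] regionName, Put rewrittenRegionInfo)
      throws IOException {
    RowMutations mutations = new RowMutations(regionName);
    Delete d = new Delete(regionName);
    d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER);
    d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER);
    mutations.add(d);
    mutations.add(rewrittenRegionInfo); // e.g. MetaTableAccessor.makePutFromRegionInfo(hri, ts)
    meta.mutateRow(mutations);          // both changes apply atomically to the one meta row
  }
}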

[12/51] [partial] hbase-site git commit: Published site at 2a2258656b2fcd92b967131b6c1f037363553bc4.

2018-03-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e0fb1fde/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
index 50caf18..61bf913 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
@@ -45,773 +45,774 @@
 037import java.util.TimeZone;
 038import java.util.concurrent.TimeUnit;
 039
-040import org.apache.commons.cli.CommandLine;
-041import org.apache.commons.cli.CommandLineParser;
-042import org.apache.commons.cli.HelpFormatter;
-043import org.apache.commons.cli.Option;
-044import org.apache.commons.cli.OptionGroup;
-045import org.apache.commons.cli.Options;
-046import org.apache.commons.cli.ParseException;
-047import org.apache.commons.cli.PosixParser;
-048import org.apache.commons.lang3.StringUtils;
-049import org.apache.hadoop.conf.Configuration;
-050import org.apache.hadoop.conf.Configured;
-051import org.apache.hadoop.fs.FileSystem;
-052import org.apache.hadoop.fs.Path;
-053import org.apache.hadoop.hbase.Cell;
-054import org.apache.hadoop.hbase.CellComparator;
-055import org.apache.hadoop.hbase.CellUtil;
-056import org.apache.hadoop.hbase.HBaseConfiguration;
-057import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-058import org.apache.hadoop.hbase.HConstants;
-059import org.apache.hadoop.hbase.HRegionInfo;
-060import org.apache.hadoop.hbase.KeyValue;
-061import org.apache.hadoop.hbase.KeyValueUtil;
-062import org.apache.hadoop.hbase.PrivateCellUtil;
-063import org.apache.hadoop.hbase.TableName;
-064import org.apache.hadoop.hbase.Tag;
-065import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-066import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
-067import org.apache.hadoop.hbase.mob.MobUtils;
-068import org.apache.hadoop.hbase.regionserver.HStoreFile;
-069import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;
-070import org.apache.hadoop.hbase.util.BloomFilter;
-071import org.apache.hadoop.hbase.util.BloomFilterFactory;
-072import org.apache.hadoop.hbase.util.BloomFilterUtil;
-073import org.apache.hadoop.hbase.util.Bytes;
-074import org.apache.hadoop.hbase.util.FSUtils;
-075import org.apache.hadoop.hbase.util.HFileArchiveUtil;
-076import org.apache.hadoop.util.Tool;
-077import org.apache.hadoop.util.ToolRunner;
-078import org.apache.yetus.audience.InterfaceAudience;
-079import org.apache.yetus.audience.InterfaceStability;
-080import org.slf4j.Logger;
-081import org.slf4j.LoggerFactory;
-082
-083import com.codahale.metrics.ConsoleReporter;
-084import com.codahale.metrics.Counter;
-085import com.codahale.metrics.Gauge;
-086import com.codahale.metrics.Histogram;
-087import com.codahale.metrics.Meter;
-088import com.codahale.metrics.MetricFilter;
-089import com.codahale.metrics.MetricRegistry;
-090import com.codahale.metrics.ScheduledReporter;
-091import com.codahale.metrics.Snapshot;
-092import com.codahale.metrics.Timer;
-093
-094/**
-095 * Implements pretty-printing functionality for {@link HFile}s.
-096 */
-097@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-098@InterfaceStability.Evolving
-099public class HFilePrettyPrinter extends Configured implements Tool {
-100
-101  private static final Logger LOG = LoggerFactory.getLogger(HFilePrettyPrinter.class);
-102
-103  private Options options = new Options();
-104
-105  private boolean verbose;
-106  private boolean printValue;
-107  private boolean printKey;
-108  private boolean shouldPrintMeta;
-109  private boolean printBlockIndex;
-110  private boolean printBlockHeaders;
-111  private boolean printStats;
-112  private boolean checkRow;
-113  private boolean checkFamily;
-114  private boolean isSeekToRow = false;
-115  private boolean checkMobIntegrity = false;
-116  private Map<String, List<Path>> mobFileLocations;
-117  private static final int FOUND_MOB_FILES_CACHE_CAPACITY = 50;
-118  private static final int MISSING_MOB_FILES_CACHE_CAPACITY = 20;
-119  private PrintStream out = System.out;
-120  private PrintStream err = System.err;
-121
-122  /**
-123   * The row which the user wants to specify and print all the KeyValues for.
-124   */
-125  private byte[] row = null;
-126
-127  private List<Path> files = new ArrayList<>();
-128  private int count;
-129
-130  private static final String FOUR_SPACES = "    ";
-131
-132  public HFilePrettyPrinter() {
-133    super();
-134    init();
-135  }
-136
-137  public HFilePrettyPrinter(Configuration conf) {
-138    super(conf);
-139    init();
-140  }
-141
-142  private void init() {
-143    options.addOption("v", "verbose", false,
-144        "Verbose output; 
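
init() above builds the tool's flags with commons-cli (the Options/PosixParser imports in the diff). A minimal sketch of that option-handling pattern, using only the "v"/"verbose" option visible in the source; the CliSketch class is illustrative:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;

class CliSketch {
  public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption("v", "verbose", false, "Verbose output");
    CommandLine cmd = new PosixParser().parse(options, args);
    boolean verbose = cmd.hasOption("v"); // HFilePrettyPrinter stores this in a field
    System.out.println("verbose=" + verbose);
  }
}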

[12/51] [partial] hbase-site git commit: Published site at e468b4022f76688851b3e0c34722f01a56bd624f.

2018-03-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16541468/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index d4296e6..d91cb65 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -363,14 +363,14 @@ service.
 
 
 private TableName
-MetaTableAccessor.TableVisitorBase.tableName
-
-
-private TableName
 HRegionInfo.tableName
 Deprecated.
 
 
+
+private TableName
+MetaTableAccessor.TableVisitorBase.tableName
+
 
 
 
@@ -2065,59 +2065,51 @@ service.
 
 
 private TableName
-HRegionLocator.tableName
+AsyncClientScanner.tableName
 
 
-private TableName
-ScannerCallableWithReplicas.tableName
-
-
 protected TableName
-ClientScanner.tableName
-
-
-private TableName
-AsyncClientScanner.tableName
+RpcRetryingCallerWithReadReplicas.tableName
 
 
 private TableName
-AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
+AsyncProcessTask.tableName
 
 
 private TableName
-AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
+AsyncProcessTask.Builder.tableName
 
 
 private TableName
-RegionInfoBuilder.tableName
+RegionServerCallable.tableName
 
 
 private TableName
-RegionInfoBuilder.MutableRegionInfo.tableName
+AsyncSingleRequestRpcRetryingCaller.tableName
 
 
-private TableName
-RawAsyncTableImpl.tableName
+protected TableName
+RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
 
 
 private TableName
-RegionCoprocessorRpcChannelImpl.tableName
+TableState.tableName
 
 
-private TableName
-AsyncTableRegionLocatorImpl.tableName
+protected TableName
+TableBuilderBase.tableName
 
 
 protected TableName
-RegionAdminServiceCallable.tableName
+ClientScanner.tableName
 
 
-private TableName
-HTable.tableName
+protected TableName
+RegionAdminServiceCallable.tableName
 
 
 private TableName
-BufferedMutatorImpl.tableName
+HTable.tableName
 
 
 private TableName
@@ -2129,47 +2121,55 @@ service.
 
 
 private TableName
-HBaseAdmin.TableFuture.tableName
+AsyncTableRegionLocatorImpl.tableName
 
 
 private TableName
-AsyncRequestFutureImpl.tableName
+HRegionLocator.tableName
 
 
 private TableName
-AsyncProcessTask.tableName
+BufferedMutatorImpl.tableName
 
 
 private TableName
-AsyncProcessTask.Builder.tableName
+RawAsyncTableImpl.tableName
 
 
 protected TableName
-RawAsyncHBaseAdmin.TableProcedureBiConsumer.tableName
+AsyncTableBuilderBase.tableName
 
 
 private TableName
-RegionServerCallable.tableName
+RegionCoprocessorRpcChannelImpl.tableName
 
 
 private TableName
-AsyncSingleRequestRpcRetryingCaller.tableName
+ScannerCallableWithReplicas.tableName
 
 
-protected TableName
-TableBuilderBase.tableName
+private TableName
+HBaseAdmin.TableFuture.tableName
 
 
-protected TableName
-RpcRetryingCallerWithReadReplicas.tableName
+private TableName
+AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName
 
 
-protected TableName
-AsyncTableBuilderBase.tableName
+private TableName
+AsyncRpcRetryingCallerFactory.BatchCallerBuilder.tableName
 
 
 private TableName
-TableState.tableName
+RegionInfoBuilder.tableName
+
+
+private TableName
+RegionInfoBuilder.MutableRegionInfo.tableName
+
+
+private TableName
+AsyncRequestFutureImpl.tableName
 
 
 
@@ -2211,9 +2211,7 @@ service.
 
 
 TableName
-AsyncTable.getName()
-Gets the fully qualified table name instance of this table.
-
+AsyncTableImpl.getName()
 
 
 TableName
@@ -2223,21 +2221,26 @@ service.
 
 
 TableName
-HRegionLocator.getName()
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table that this BufferedMutator writes to.
+
 
 
 TableName
-AsyncTableRegionLocator.getName()
-Gets the fully qualified table name instance of the table whose region we want to locate.
+AsyncBufferedMutator.getName()
+Gets the fully qualified table name instance of the table that this
+ AsyncBufferedMutator writes to.
 
 
 
 TableName
-AsyncTableImpl.getName()
+HTable.getName()
 
 
 TableName
-RawAsyncTableImpl.getName()
+AsyncTable.getName()
+Gets the fully qualified table name instance of this 
table.
+
 
 
 TableName
@@ -2245,34 +2248,31 @@ service.
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
-
+HRegionLocator.getName()
 
 
 TableName
-RegionLocator.getName()
-Gets the fully qualified table name instance of this 
table.
-
+BufferedMutatorImpl.getName()
 
 
 TableName
-AsyncBufferedMutatorImpl.getName()
+RawAsyncTableImpl.getName()
 
 
 TableName
-HTable.getName()
+RegionLocator.getName()
+Gets the fully qualified table name instance of this 
table.
+
 
 
 TableName
-BufferedMutatorImpl.getName()
+AsyncTableRegionLocator.getName()
+Gets the fully qualified table name instance of the table 
whose region we want to locate.
+
 
 
 TableName

[12/51] [partial] hbase-site git commit: Published site at 64061f896fe21512504e3886a400759e88b519da.

2018-03-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.html b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.html
index 180e58b..267f485 100644
--- a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.html
+++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionObserver.html
@@ -263,19 +263,19 @@
 
 
 java.util.Optional<RegionObserver>
-ScanModifyingObserver.getRegionObserver()
+WriteHeavyIncrementObserver.getRegionObserver()
 
 java.util.Optional<RegionObserver>
-WriteHeavyIncrementObserver.getRegionObserver()
+ScanModifyingObserver.getRegionObserver()
 
 java.util.Optional<RegionObserver>
-ZooKeeperScanPolicyObserver.getRegionObserver()
+ValueRewritingObserver.getRegionObserver()
 
 java.util.Optional<RegionObserver>
-ValueRewritingObserver.getRegionObserver()
+ZooKeeperScanPolicyObserver.getRegionObserver()
 
 java.util.Optional<RegionObserver>
 
 
 https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalRegionObserver

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
 
b/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
index b487fc4..f0fc745 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/errorhandling/class-use/ForeignException.html
@@ -339,6 +339,14 @@
 
 
 void
+ProcedureCoordinatorRpcs.sendAbortToMembers(ProcedureprocName,
+  ForeignExceptioncause)
+Notify the members that the coordinator has aborted the 
procedure and that it should release
+ barrier resources.
+
+
+
+void
 ZKProcedureCoordinator.sendAbortToMembers(Procedureproc,
   ForeignExceptionee)
 This is the abort message being sent by the coordinator to 
member
@@ -347,14 +355,6 @@
  coordinator.
 
 
-
-void
-ProcedureCoordinatorRpcs.sendAbortToMembers(ProcedureprocName,
-  ForeignExceptioncause)
-Notify the members that the coordinator has aborted the 
procedure and that it should release
- barrier resources.
-
-
 
 void
 ProcedureMemberRpcs.sendMemberAborted(Subproceduresub,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f1ebf5b6/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
index 49b5557..338b7a4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
@@ -144,17 +144,15 @@
 
 
 
-static HColumnDescriptor
-HColumnDescriptor.parseFrom(byte[]bytes)
-Deprecated.
-
-
-
 static HTableDescriptor
 HTableDescriptor.parseFrom(byte[]bytes)
 Deprecated.
 
 
+
+static ClusterId
+ClusterId.parseFrom(byte[]bytes)
+
 
 static HRegionInfo
 HRegionInfo.parseFrom(byte[]bytes)
@@ -165,8 +163,10 @@
 
 
 
-static ClusterId
-ClusterId.parseFrom(byte[]bytes)
+static HColumnDescriptor
+HColumnDescriptor.parseFrom(byte[]bytes)
+Deprecated.
+
 
 
 static SplitLogTask
@@ -220,17 +220,17 @@
 TableDescriptorBuilder.ModifyableTableDescriptor.parseFrom(byte[]bytes)
 
 
+static RegionInfo
+RegionInfo.parseFrom(byte[]bytes)
+
+
 static ColumnFamilyDescriptor
 ColumnFamilyDescriptorBuilder.parseFrom(byte[]pbBytes)
 
-
+
 private static ColumnFamilyDescriptor
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.parseFrom(byte[]bytes)
 
-
-static RegionInfo
-RegionInfo.parseFrom(byte[]bytes)
-
 
 static RegionInfo
 RegionInfo.parseFrom(byte[]bytes,
@@ -305,153 +305,153 @@
 ByteArrayComparable.parseFrom(byte[]pbBytes)
 
 
-static ColumnPrefixFilter
-ColumnPrefixFilter.parseFrom(byte[]pbBytes)
+static SingleColumnValueExcludeFilter
+SingleColumnValueExcludeFilter.parseFrom(byte[]pbBytes)
 
 
-static 
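These static parseFrom(byte[]) methods deserialize the protobuf form of each
type and throw DeserializationException on corrupt input. A hedged round-trip
sketch for RegionInfo (someRegionInfo is assumed to exist):

    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.exceptions.DeserializationException;

    byte[] bytes = RegionInfo.toByteArray(someRegionInfo); // protobuf-encoded form
    try {
      RegionInfo copy = RegionInfo.parseFrom(bytes);
    } catch (DeserializationException e) {
      // raised when the bytes are not a valid serialized RegionInfo
    }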

[12/51] [partial] hbase-site git commit: Published site at 4cb40e6d846ce1f28ffb40d388c9efb753197813.

2018-03-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4dc2a2e8/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
 
b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
index a6c6bcc..65d4b29 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/class-use/Batch.Callback.html
@@ -113,17 +113,17 @@
 
 
 
-private Batch.Callback<CResult>
-AsyncRequestFutureImpl.callback
-
-
 private Batch.Callback<T>
 AsyncProcessTask.callback
 
-
+
 private Batch.Callback<T>
 AsyncProcessTask.Builder.callback
 
+
+private Batch.Callback<CResult>
+AsyncRequestFutureImpl.callback
+
 
 
 
@@ -148,42 +148,50 @@
 
 
 <R> void
-Table.batchCallback(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List<? extends Row> actions,
+HTable.batchCallback(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List<? extends Row> actions,
  https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[] results,
- Batch.Callback<R> callback)
-Same as Table.batch(List,
 Object[]), but with a callback.
-
+ Batch.Callback<R> callback)
 
 
 <R> void
-HTable.batchCallback(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List<? extends Row> actions,
+Table.batchCallback(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List<? extends Row> actions,
  https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[] results,
- Batch.Callback<R> callback)
+ Batch.Callback<R> callback)
+Same as Table.batch(List,
 Object[]), but with a callback.
+
 
 
 <R extends 
com.google.protobuf.Message> void
-Table.batchCoprocessorService(com.google.protobuf.Descriptors.MethodDescriptor methodDescriptor,
+HTable.batchCoprocessorService(com.google.protobuf.Descriptors.MethodDescriptor methodDescriptor,
com.google.protobuf.Message request,
byte[] startKey,
byte[] endKey,
R responsePrototype,
-   Batch.Callback<R> callback)
-Creates an instance of the given Service 
subclass for each table
- region spanning the range from the startKey row to 
endKey row (inclusive), all
- the invocations to the same region server will be batched into one call.
-
+   Batch.Callback<R> callback)
 
 
 <R extends 
com.google.protobuf.Message> void
-HTable.batchCoprocessorService(com.google.protobuf.Descriptors.MethodDescriptor methodDescriptor,
+Table.batchCoprocessorService(com.google.protobuf.Descriptors.MethodDescriptor methodDescriptor,
com.google.protobuf.Message request,
byte[] startKey,
byte[] endKey,
R responsePrototype,
-   Batch.Callback<R> callback)
+   Batch.Callback<R> callback)
+Creates an instance of the given Service 
subclass for each table
+ region spanning the range from the startKey row to 
endKey row (inclusive), all
+ the invocations to the same region server will be batched into one call.
+
 
 
 <T extends 
com.google.protobuf.Service, R> void
+HTable.coprocessorService(https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class<T> service,
+  byte[] startKey,
+  byte[] endKey,
+  Batch.Call<T,R> callable,
+  Batch.Callback<R> callback)
+
+
+<T extends 
com.google.protobuf.Service, R> void
 Table.coprocessorService(https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class<T> service,
   byte[] startKey,
   byte[] endKey,
@@ -195,14 +203,6 @@
  with each Service instance.
 
 
-
-<T extends 
com.google.protobuf.Service, R> void
-HTable.coprocessorService(https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class<T> service,
-  byte[] startKey,
-  byte[] endKey,
-  Batch.Call<T,R> callable,
-  Batch.Callback<R> callback)
-
 
 static <R> void
 HTable.doBatchWithCallback(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List<? extends Row> actions,
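A hedged usage sketch of the batchCallback signature listed above (assumes an
open Table named table with a column family "f"; the call declares IOException
and InterruptedException, elided here):

    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Row;
    import org.apache.hadoop.hbase.util.Bytes;

    List<Row> actions = Arrays.asList(
        new Get(Bytes.toBytes("row1")),
        new Put(Bytes.toBytes("row2"))
            .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")));
    Object[] results = new Object[actions.size()];
    table.batchCallback(actions, results,
        (region, row, result) -> { /* invoked once per completed action */ });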


[12/51] [partial] hbase-site git commit: Published site at 8ab7b20f48951d77945181024f5e15842bc253c4.

2018-03-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6eb695c8/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferEventHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferEventHandler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferEventHandler.html
index 9971079..03c8b000 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferEventHandler.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSHLog.RingBufferEventHandler.html
@@ -49,1067 +49,1082 @@
 041import org.apache.hadoop.fs.Path;
 042import 
org.apache.hadoop.hbase.HConstants;
 043import 
org.apache.hadoop.hbase.client.RegionInfo;
-044import 
org.apache.hadoop.hbase.trace.TraceUtil;
-045import 
org.apache.hadoop.hbase.util.Bytes;
-046import 
org.apache.hadoop.hbase.util.ClassSize;
-047import 
org.apache.hadoop.hbase.util.FSUtils;
-048import 
org.apache.hadoop.hbase.util.HasThread;
-049import 
org.apache.hadoop.hbase.util.Threads;
-050import 
org.apache.hadoop.hbase.wal.FSHLogProvider;
-051import 
org.apache.hadoop.hbase.wal.WALEdit;
-052import 
org.apache.hadoop.hbase.wal.WALKeyImpl;
-053import 
org.apache.hadoop.hbase.wal.WALProvider.Writer;
-054import 
org.apache.hadoop.hdfs.DFSOutputStream;
-055import 
org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
-056import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-057import 
org.apache.htrace.core.TraceScope;
-058import 
org.apache.yetus.audience.InterfaceAudience;
-059import org.slf4j.Logger;
-060import org.slf4j.LoggerFactory;
-061import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-062
-063/**
-064 * The default implementation of FSWAL.
-065 */
-066@InterfaceAudience.Private
-067public class FSHLog extends 
AbstractFSWAL<Writer> {
-068  // IMPLEMENTATION NOTES:
-069  //
-070  // At the core is a ring buffer. Our 
ring buffer is the LMAX Disruptor. It tries to
-071  // minimize synchronizations and 
volatile writes when multiple contending threads as is the case
-072  // here appending and syncing on a 
single WAL. The Disruptor is configured to handle multiple
-073  // producers but it has one consumer 
only (the producers in HBase are IPC Handlers calling append
-074  // and then sync). The single 
consumer/writer pulls the appends and syncs off the ring buffer.
-075  // When a handler calls sync, it is 
given back a future. The producer 'blocks' on the future so
-076  // it does not return until the sync 
completes. The future is passed over the ring buffer from
-077  // the producer/handler to the consumer 
thread where it does its best to batch up the producer
-078  // syncs so one WAL sync actually spans 
multiple producer sync invocations. How well the
-079  // batching works depends on the write 
rate; i.e. we tend to batch more in times of
-080  // high writes/syncs.
-081  //
-082  // Calls to append now also wait until 
the append has been done on the consumer side of the
-083  // disruptor. We used to not wait but 
it makes the implementation easier to grok if we have
-084  // the region edit/sequence id after 
the append returns.
-085  //
-086  // TODO: Handlers need to coordinate 
appending AND syncing. Can we have the threads contend
-087  // once only? Probably hard given syncs 
take way longer than an append.
-088  //
-089  // The consumer threads pass the syncs 
off to multiple syncing threads in a round robin fashion
-090  // to ensure we keep up back-to-back FS 
sync calls (FS sync calls are the long poll writing the
-091  // WAL). The consumer thread passes the 
futures to the sync threads for it to complete
-092  // the futures when done.
-093  //
-094  // The 'sequence' in the below is the 
sequence of the append/sync on the ringbuffer. It
-095  // acts as a sort-of transaction id. It 
is always incrementing.
-096  //
-097  // The RingBufferEventHandler class 
hosts the ring buffer consuming code. The threads that
-098  // do the actual FS sync are 
implementations of SyncRunner. SafePointZigZagLatch is a
-099  // synchronization class used to halt 
the consumer at a safe point -- just after all outstanding
-100  // syncs and appends have completed -- 
so the log roller can swap the WAL out under it.
-101  //
-102  // We use ring buffer sequence as txid 
of FSWALEntry and SyncFuture.
-103  private static final Logger LOG = 
LoggerFactory.getLogger(FSHLog.class);
-104
-105  /**
-106   * The nexus at which all incoming 
handlers meet. Does appends and sync with an ordering. Appends
-107   * and syncs are each put on the ring 
which means handlers need to smash up against the ring twice
-108   * (can we make it once only? ... maybe 
not since time to append is so different from time to sync
-109   * and sometimes we don't want to sync 
or we want to async the sync). The ring is where we make
-110   * 
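The hand-off those implementation notes describe, many producers each blocking
on a future while one consumer batches their syncs, can be shown with a toy
sketch using plain java.util.concurrent types (an illustration of the pattern
only, not the actual Disruptor-based FSHLog code; checked exceptions elided):

    // Producer (an IPC handler): enqueue a sync request, then block on it.
    BlockingQueue<CompletableFuture<Void>> ring = new LinkedBlockingQueue<>();
    CompletableFuture<Void> syncDone = new CompletableFuture<>();
    ring.put(syncDone);
    syncDone.get(); // returns only after the batched sync completes

    // Consumer (single thread): drain every outstanding request, perform one
    // filesystem sync for the whole batch, then complete all futures at once.
    List<CompletableFuture<Void>> batch = new ArrayList<>();
    ring.drainTo(batch);
    // fs.hsync() would run here exactly once for the batch
    batch.forEach(f -> f.complete(null));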

[12/51] [partial] hbase-site git commit: Published site at 00095a2ef9442e3fd86c04876c9d91f2f8b23ad8.

2018-03-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bd675fa3/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
index d654af2..3cec2fd 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.html
@@ -29,312 +29,289 @@
 021import java.io.IOException;
 022import java.util.ArrayList;
 023import java.util.Collection;
-024import java.util.HashMap;
-025import java.util.List;
-026import java.util.concurrent.Callable;
-027import 
java.util.concurrent.ExecutionException;
-028import java.util.concurrent.Executors;
-029import java.util.concurrent.TimeUnit;
-030import 
org.apache.hadoop.conf.Configuration;
-031import 
org.apache.hadoop.hbase.ClusterMetrics;
-032import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-033import 
org.apache.hadoop.hbase.ServerName;
-034import 
org.apache.hadoop.hbase.TableName;
-035import 
org.apache.hadoop.hbase.client.RegionInfo;
-036import 
org.apache.hadoop.hbase.client.TableDescriptor;
-037import 
org.apache.hadoop.hbase.master.MasterServices;
-038import 
org.apache.hadoop.hbase.master.assignment.AssignmentManager;
-039import 
org.apache.hadoop.hbase.regionserver.HRegion;
-040import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-041import 
org.apache.yetus.audience.InterfaceAudience;
-042import org.slf4j.Logger;
-043import org.slf4j.LoggerFactory;
-044import 
org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;
-045import 
org.apache.hbase.thirdparty.com.google.common.cache.CacheLoader;
-046import 
org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache;
-047import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-048import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.Futures;
-049import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListenableFuture;
-050import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningExecutorService;
-051import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.MoreExecutors;
-052import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-053
-054/**
-055 * This will find where data for a region 
is located in HDFS. It ranks
-056 * {@link ServerName}'s by the size of 
the store files they are holding for a
-057 * given region.
-058 *
-059 */
-060@InterfaceAudience.Private
-061class RegionLocationFinder {
-062  private static final Logger LOG = 
LoggerFactory.getLogger(RegionLocationFinder.class);
-063  private static final long CACHE_TIME = 
240 * 60 * 1000;
-064  private static final 
HDFSBlocksDistribution EMPTY_BLOCK_DISTRIBUTION = new 
HDFSBlocksDistribution();
-065  private Configuration conf;
-066  private volatile ClusterMetrics 
status;
-067  private MasterServices services;
-068  private final ListeningExecutorService 
executor;
-069  // Do not scheduleFullRefresh at master 
startup
-070  private long lastFullRefresh = 
EnvironmentEdgeManager.currentTime();
-071
-072  private CacheLoader<RegionInfo, 
HDFSBlocksDistribution> loader =
-073  new CacheLoader<RegionInfo, 
HDFSBlocksDistribution>() {
-074
-075@Override
-076public 
ListenableFuture<HDFSBlocksDistribution> reload(final RegionInfo hri,
-077HDFSBlocksDistribution oldValue) 
throws Exception {
-078  return executor.submit(new 
Callable<HDFSBlocksDistribution>() {
-079@Override
-080public HDFSBlocksDistribution 
call() throws Exception {
-081  return 
internalGetTopBlockLocation(hri);
-082}
-083  });
-084}
-085
-086@Override
-087public HDFSBlocksDistribution 
load(RegionInfo key) throws Exception {
-088  return 
internalGetTopBlockLocation(key);
-089}
-090  };
+024import java.util.Collections;
+025import java.util.HashMap;
+026import java.util.List;
+027import java.util.Map;
+028import java.util.concurrent.Callable;
+029import 
java.util.concurrent.ExecutionException;
+030import java.util.concurrent.Executors;
+031import java.util.concurrent.TimeUnit;
+032
+033import 
org.apache.commons.collections4.CollectionUtils;
+034import 
org.apache.commons.collections4.MultiValuedMap;
+035import 
org.apache.commons.collections4.multimap.ArrayListValuedHashMap;
+036import 
org.apache.hadoop.conf.Configuration;
+037import 
org.apache.hadoop.hbase.ClusterMetrics;
+038import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
+039import 
org.apache.hadoop.hbase.ServerName;
+040import 
org.apache.hadoop.hbase.TableName;
+041import 
org.apache.hadoop.hbase.client.RegionInfo;
+042import 
org.apache.hadoop.hbase.client.TableDescriptor;
+043import 
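The pattern above, a Guava LoadingCache whose reload() pushes the HDFS
block-location lookup onto an executor so refreshes never block readers, can
be sketched self-containedly. Here compute() stands in for
internalGetTopBlockLocation(), and plain Guava coordinates are used instead of
the relocated hbase-thirdparty ones:

    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;
    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.CacheLoader;
    import com.google.common.cache.LoadingCache;
    import com.google.common.util.concurrent.ListenableFuture;
    import com.google.common.util.concurrent.ListeningExecutorService;
    import com.google.common.util.concurrent.MoreExecutors;

    ListeningExecutorService pool =
        MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(4));
    LoadingCache<String, Long> cache = CacheBuilder.newBuilder()
        .refreshAfterWrite(240, TimeUnit.MINUTES) // mirrors CACHE_TIME above
        .build(new CacheLoader<String, Long>() {
          @Override public Long load(String key) { return compute(key); }
          @Override public ListenableFuture<Long> reload(String key, Long old) {
            return pool.submit(() -> compute(key)); // async refresh off the caller thread
          }
        });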

[12/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
index b99f924..2bb6cea 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
@@ -37,1779 +37,1734 @@
 029import java.util.UUID;
 030import 
java.util.concurrent.ConcurrentHashMap;
 031import 
java.util.concurrent.ConcurrentMap;
-032import java.util.regex.Matcher;
-033
-034import 
org.apache.commons.collections4.map.AbstractReferenceMap;
-035import 
org.apache.commons.collections4.map.ReferenceMap;
-036import 
org.apache.hadoop.conf.Configuration;
-037import org.apache.hadoop.fs.FileSystem;
-038import org.apache.hadoop.fs.Path;
-039import org.apache.hadoop.hbase.Cell;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.Coprocessor;
-042import 
org.apache.hadoop.hbase.HBaseConfiguration;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.RawCellBuilder;
-045import 
org.apache.hadoop.hbase.RawCellBuilderFactory;
-046import 
org.apache.hadoop.hbase.ServerName;
-047import 
org.apache.hadoop.hbase.SharedConnection;
-048import 
org.apache.hadoop.hbase.client.Append;
-049import 
org.apache.hadoop.hbase.client.Connection;
-050import 
org.apache.hadoop.hbase.client.Delete;
-051import 
org.apache.hadoop.hbase.client.Durability;
-052import 
org.apache.hadoop.hbase.client.Get;
-053import 
org.apache.hadoop.hbase.client.Increment;
-054import 
org.apache.hadoop.hbase.client.Mutation;
-055import 
org.apache.hadoop.hbase.client.Put;
-056import 
org.apache.hadoop.hbase.client.RegionInfo;
-057import 
org.apache.hadoop.hbase.client.Result;
-058import 
org.apache.hadoop.hbase.client.Scan;
-059import 
org.apache.hadoop.hbase.client.TableDescriptor;
-060import 
org.apache.hadoop.hbase.coprocessor.BaseEnvironment;
-061import 
org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
-062import 
org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-063import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-064import 
org.apache.hadoop.hbase.coprocessor.CoprocessorService;
-065import 
org.apache.hadoop.hbase.coprocessor.CoprocessorServiceBackwardCompatiblity;
-066import 
org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
-067import 
org.apache.hadoop.hbase.coprocessor.EndpointObserver;
-068import 
org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
-069import 
org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
-070import 
org.apache.hadoop.hbase.coprocessor.ObserverContext;
-071import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-072import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-073import 
org.apache.hadoop.hbase.coprocessor.RegionObserver;
-074import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-075import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-076import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-077import 
org.apache.hadoop.hbase.io.Reference;
-078import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-079import 
org.apache.hadoop.hbase.metrics.MetricRegistry;
-080import 
org.apache.hadoop.hbase.regionserver.Region.Operation;
-081import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-082import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-083import 
org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
-084import 
org.apache.hadoop.hbase.security.User;
-085import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.CoprocessorClassLoader;
-088import 
org.apache.hadoop.hbase.util.Pair;
-089import 
org.apache.hadoop.hbase.wal.WALEdit;
-090import 
org.apache.hadoop.hbase.wal.WALKey;
-091import 
org.apache.yetus.audience.InterfaceAudience;
-092import org.slf4j.Logger;
-093import org.slf4j.LoggerFactory;
-094
-095/**
-096 * Implements the coprocessor environment 
and runtime support for coprocessors
-097 * loaded within a {@link Region}.
-098 */
-099@InterfaceAudience.Private
-100public class RegionCoprocessorHost
-101extends 
CoprocessorHost<RegionCoprocessor, RegionCoprocessorEnvironment> {
-102
-103  private static final Logger LOG = 
LoggerFactory.getLogger(RegionCoprocessorHost.class);
-104  // The shared data map
-105  private static final 
ReferenceMap<String, ConcurrentMap<String, Object>> SHARED_DATA_MAP 
=
-106  new 
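One common way coprocessors reach this host is through table-level attributes;
a hedged sketch of registering one when defining a table (the observer class
name is illustrative, and the 2.x builder method addCoprocessor, which
declares IOException, is assumed):

    TableDescriptor td = TableDescriptorBuilder
        .newBuilder(TableName.valueOf("demo"))
        .addCoprocessor("org.example.MyRegionObserver") // loaded per-region by this host
        .build();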

[12/51] [partial] hbase-site git commit: Published site at 31da4d0bce69b3a47066a5df675756087ce4dc60.

2018-03-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a754d895/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index 1546b5d..31c6fd0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HRegionServer.CompactionChecker
+private static class HRegionServer.CompactionChecker
 extends ScheduledChore
 
 
@@ -233,7 +233,7 @@ extends 
 
 instance
-private finalHRegionServer instance
+private finalHRegionServer instance
 
 
 
@@ -242,7 +242,7 @@ extends 
 
 majorCompactPriority
-private finalint majorCompactPriority
+private finalint majorCompactPriority
 
 
 
@@ -251,7 +251,7 @@ extends 
 
 DEFAULT_PRIORITY
-private static finalint DEFAULT_PRIORITY
+private static finalint DEFAULT_PRIORITY
 
 See Also:
 Constant
 Field Values
@@ -264,7 +264,7 @@ extends 
 
 iteration
-privatelong iteration
+privatelong iteration
 
 
 
@@ -281,7 +281,7 @@ extends 
 
 CompactionChecker
-CompactionChecker(HRegionServerh,
+CompactionChecker(HRegionServerh,
   intsleepTime,
   Stoppablestopper)
 
@@ -300,7 +300,7 @@ extends 
 
 chore
-protectedvoidchore()
+protectedvoidchore()
 Description copied from 
class:ScheduledChore
 The task to execute on each scheduled execution of the 
Chore
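A hedged sketch of the ScheduledChore contract this class follows (names and
period are illustrative; the real chore scans stores and requests compactions):

    import org.apache.hadoop.hbase.ScheduledChore;
    import org.apache.hadoop.hbase.Stoppable;

    // Sketch: a chore runs chore() every `period` ms until its stopper stops it.
    class ExampleChecker extends ScheduledChore {
      ExampleChecker(Stoppable stopper, int period) {
        super("ExampleChecker", stopper, period);
      }
      @Override
      protected void chore() {
        // periodic work goes here, e.g. checking stores for needed compactions
      }
    }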
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a754d895/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index 38486a8..1239bd7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HRegionServer.MovedRegionInfo
+private static class HRegionServer.MovedRegionInfo
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -218,7 +218,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 serverName
-private finalServerName serverName
+private finalServerName serverName
 
 
 
@@ -227,7 +227,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 seqNum
-private finallong seqNum
+private finallong seqNum
 
 
 
@@ -236,7 +236,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 ts
-private finallong ts
+private finallong ts
 
 
 
@@ -253,7 +253,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 MovedRegionInfo
-publicMovedRegionInfo(ServerNameserverName,
+publicMovedRegionInfo(ServerNameserverName,
longcloseSeqNum)
 
 
@@ -271,7 +271,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getServerName
-publicServerNamegetServerName()
+publicServerNamegetServerName()
 
 
 
@@ -280,7 +280,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getSeqNum
-publiclonggetSeqNum()
+publiclonggetSeqNum()
 
 
 
@@ -289,7 +289,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 getMoveTime
-publiclonggetMoveTime()
+publiclonggetMoveTime()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a754d895/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index 95037fd..01fb6e7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static final class HRegionServer.MovedRegionsCleaner
+protected static final class HRegionServer.MovedRegionsCleaner
 extends ScheduledChore
 implements Stoppable
 Creates a Chore thread to clean the moved region 
cache.
@@ -242,7 +242,7 @@ implements 
 
 regionServer
-privateHRegionServer regionServer
+privateHRegionServer regionServer
 
 
 
@@ -251,7 +251,7 @@ implements 
 
 

[12/51] [partial] hbase-site git commit: Published site at 6b77786dfc46d25ac5bb5f1c8a4a9eb47b52a604.

2018-03-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/81cde4ce/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index ef30022..abeccf1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -162,11 +162,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRecordReader.createKey()
+TableRecordReader.createKey()
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey()
+TableSnapshotInputFormat.TableSnapshotRecordReader.createKey()
 
 
 ImmutableBytesWritable
@@ -183,9 +183,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader<ImmutableBytesWritable,Result>
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
org.apache.hadoop.mapred.JobConfjob,
-   
org.apache.hadoop.mapred.Reporterreporter)
+   org.apache.hadoop.mapred.Reporterreporter)
+Builds a TableRecordReader.
+
 
 
 org.apache.hadoop.mapred.RecordReader<ImmutableBytesWritable,Result>
@@ -195,11 +197,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader<ImmutableBytesWritable,Result>
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
org.apache.hadoop.mapred.JobConfjob,
-   org.apache.hadoop.mapred.Reporterreporter)
-Builds a TableRecordReader.
-
+   
org.apache.hadoop.mapred.Reporterreporter)
 
 
 
@@ -218,10 +218,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow,
-   Resultvalues,
+IdentityTableMap.map(ImmutableBytesWritablekey,
+   Resultvalue,
org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporterreporter)
+   org.apache.hadoop.mapred.Reporterreporter)
+Pass the key, value to reduce
+
 
 
 void
@@ -234,21 +236,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritablekey,
-   Resultvalue,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow,
+   Resultvalues,
org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporterreporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporterreporter)
 
 
 boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritablekey,
+TableRecordReader.next(ImmutableBytesWritablekey,
 Resultvalue)
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritablekey,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritablekey,
 Resultvalue)
 
 
@@ -281,10 +281,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow,
-   Resultvalues,
+IdentityTableMap.map(ImmutableBytesWritablekey,
+   Resultvalue,
org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporterreporter)
+   org.apache.hadoop.mapred.Reporterreporter)
+Pass the key, value to reduce
+
 
 
 void
@@ -297,12 +299,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritablekey,
-   Resultvalue,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow,
+   Resultvalues,
org.apache.hadoop.mapred.OutputCollector<ImmutableBytesWritable,Result> output,
-   org.apache.hadoop.mapred.Reporterreporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporterreporter)
 
 
 void
@@ -349,7 +349,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ImmutableBytesWritable
-TableRecordReaderImpl.key
+MultithreadedTableMapper.SubMapRecordReader.key
 
 
 private ImmutableBytesWritable
@@ -357,7 +357,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.key
+TableRecordReaderImpl.key
 
 
 (package private) ImmutableBytesWritable
@@ -427,33 +427,33 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRegionRecordReader.getCurrentKey()
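These mapred-era readers follow the classic RecordReader contract; a hedged
consumption sketch (reader is assumed to come from one of the
getRecordReader(...) calls listed above; checked exceptions elided):

    ImmutableBytesWritable key = reader.createKey();
    Result value = reader.createValue();
    while (reader.next(key, value)) {
      // key holds the row key bytes; value holds that row's cells
    }
    reader.close();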

[12/51] [partial] hbase-site git commit: Published site at 1384da71375427b522b09f06862bb5d629cef52f.

2018-03-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d347bde8/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
index 338b7a4..49b5557 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
@@ -144,14 +144,16 @@
 
 
 
-static HTableDescriptor
-HTableDescriptor.parseFrom(byte[]bytes)
+static HColumnDescriptor
+HColumnDescriptor.parseFrom(byte[]bytes)
 Deprecated.
 
 
 
-static ClusterId
-ClusterId.parseFrom(byte[]bytes)
+static HTableDescriptor
+HTableDescriptor.parseFrom(byte[]bytes)
+Deprecated.
+
 
 
 static HRegionInfo
@@ -163,10 +165,8 @@
 
 
 
-static HColumnDescriptor
-HColumnDescriptor.parseFrom(byte[]bytes)
-Deprecated.
-
+static ClusterId
+ClusterId.parseFrom(byte[]bytes)
 
 
 static SplitLogTask
@@ -220,17 +220,17 @@
 TableDescriptorBuilder.ModifyableTableDescriptor.parseFrom(byte[]bytes)
 
 
-static RegionInfo
-RegionInfo.parseFrom(byte[]bytes)
-
-
 static ColumnFamilyDescriptor
 ColumnFamilyDescriptorBuilder.parseFrom(byte[]pbBytes)
 
-
+
 private static ColumnFamilyDescriptor
 ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.parseFrom(byte[]bytes)
 
+
+static RegionInfo
+RegionInfo.parseFrom(byte[]bytes)
+
 
 static RegionInfo
 RegionInfo.parseFrom(byte[]bytes,
@@ -305,151 +305,151 @@
 ByteArrayComparable.parseFrom(byte[]pbBytes)
 
 
-static SingleColumnValueExcludeFilter
-SingleColumnValueExcludeFilter.parseFrom(byte[]pbBytes)
+static ColumnPrefixFilter
+ColumnPrefixFilter.parseFrom(byte[]pbBytes)
 
 
-static ValueFilter
-ValueFilter.parseFrom(byte[]pbBytes)
+static ColumnCountGetFilter
+ColumnCountGetFilter.parseFrom(byte[]pbBytes)
 
 
-static SkipFilter
-SkipFilter.parseFrom(byte[]pbBytes)
+static RowFilter
+RowFilter.parseFrom(byte[]pbBytes)
 
 
-static FamilyFilter
-FamilyFilter.parseFrom(byte[]pbBytes)
+static FuzzyRowFilter
+FuzzyRowFilter.parseFrom(byte[]pbBytes)
 
 
-static BinaryPrefixComparator
-BinaryPrefixComparator.parseFrom(byte[]pbBytes)
+static BinaryComparator
+BinaryComparator.parseFrom(byte[]pbBytes)
 
 
-static NullComparator
-NullComparator.parseFrom(byte[]pbBytes)
+static RegexStringComparator
+RegexStringComparator.parseFrom(byte[]pbBytes)
 
 
-static BigDecimalComparator
-BigDecimalComparator.parseFrom(byte[]pbBytes)
+static Filter
+Filter.parseFrom(byte[]pbBytes)
+Concrete implementers can signal a failure condition in 
their code by throwing an
+ https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException.
+
 
 
-static ColumnPrefixFilter
-ColumnPrefixFilter.parseFrom(byte[]pbBytes)
+static RandomRowFilter
+RandomRowFilter.parseFrom(byte[]pbBytes)
 
 
-static PageFilter
-PageFilter.parseFrom(byte[]pbBytes)
+static FirstKeyOnlyFilter
+FirstKeyOnlyFilter.parseFrom(byte[]pbBytes)
 
 
-static BitComparator
-BitComparator.parseFrom(byte[]pbBytes)
+static SkipFilter
+SkipFilter.parseFrom(byte[]pbBytes)
 
 
-static RowFilter
-RowFilter.parseFrom(byte[]pbBytes)
+static BinaryPrefixComparator
+BinaryPrefixComparator.parseFrom(byte[]pbBytes)
 
 
-static ColumnRangeFilter
-ColumnRangeFilter.parseFrom(byte[]pbBytes)
+static TimestampsFilter
+TimestampsFilter.parseFrom(byte[]pbBytes)
 
 
-static ColumnCountGetFilter
-ColumnCountGetFilter.parseFrom(byte[]pbBytes)
+static ValueFilter
+ValueFilter.parseFrom(byte[]pbBytes)
 
 
-static SubstringComparator
-SubstringComparator.parseFrom(byte[]pbBytes)
+static KeyOnlyFilter
+KeyOnlyFilter.parseFrom(byte[]pbBytes)
 
 
-static MultipleColumnPrefixFilter
-MultipleColumnPrefixFilter.parseFrom(byte[]pbBytes)
+static FamilyFilter
+FamilyFilter.parseFrom(byte[]pbBytes)
 
 
-static ColumnPaginationFilter
-ColumnPaginationFilter.parseFrom(byte[]pbBytes)
+static QualifierFilter
+QualifierFilter.parseFrom(byte[]pbBytes)
 
 
-static DependentColumnFilter
-DependentColumnFilter.parseFrom(byte[]pbBytes)
+static FilterList
+FilterList.parseFrom(byte[]pbBytes)
 
 
-static BinaryComparator
-BinaryComparator.parseFrom(byte[]pbBytes)
+static BigDecimalComparator
+BigDecimalComparator.parseFrom(byte[]pbBytes)
 
 
-static InclusiveStopFilter
-InclusiveStopFilter.parseFrom(byte[]pbBytes)
+static ColumnRangeFilter
+ColumnRangeFilter.parseFrom(byte[]pbBytes)
 
 
-static KeyOnlyFilter
-KeyOnlyFilter.parseFrom(byte[]pbBytes)
+static ColumnPaginationFilter
+ColumnPaginationFilter.parseFrom(byte[]pbBytes)
 
 
-static MultiRowRangeFilter
-MultiRowRangeFilter.parseFrom(byte[]pbBytes)
+static SubstringComparator
+SubstringComparator.parseFrom(byte[]pbBytes)
 
 
-static Filter
-Filter.parseFrom(byte[]pbBytes)
-Concrete implementers can signal a 
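Each filter pairs this static parseFrom(byte[]) with an instance-side
toByteArray(); a hedged round-trip sketch (checked exceptions elided):

    import org.apache.hadoop.hbase.filter.KeyOnlyFilter;

    KeyOnlyFilter f = new KeyOnlyFilter();
    byte[] pbBytes = f.toByteArray();                       // protobuf-encoded form
    KeyOnlyFilter copy = KeyOnlyFilter.parseFrom(pbBytes);  // DeserializationException on bad bytes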

[12/51] [partial] hbase-site git commit: Published site at b7b86839250bf9b295ebc1948826f43a88736d6c.

2018-03-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6b94a2f2/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.TableVisitorBase.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.TableVisitorBase.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.TableVisitorBase.html
index df5fa53..8fffb89 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.TableVisitorBase.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/MetaTableAccessor.TableVisitorBase.html
@@ -42,1927 +42,2060 @@
 034import java.util.TreeMap;
 035import java.util.regex.Matcher;
 036import java.util.regex.Pattern;
-037import 
org.apache.hadoop.conf.Configuration;
-038import 
org.apache.hadoop.hbase.Cell.Type;
-039import 
org.apache.hadoop.hbase.client.Connection;
-040import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-041import 
org.apache.hadoop.hbase.client.Consistency;
-042import 
org.apache.hadoop.hbase.client.Delete;
-043import 
org.apache.hadoop.hbase.client.Get;
-044import 
org.apache.hadoop.hbase.client.Mutation;
-045import 
org.apache.hadoop.hbase.client.Put;
-046import 
org.apache.hadoop.hbase.client.RegionInfo;
-047import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-048import 
org.apache.hadoop.hbase.client.RegionLocator;
-049import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-050import 
org.apache.hadoop.hbase.client.RegionServerCallable;
-051import 
org.apache.hadoop.hbase.client.Result;
-052import 
org.apache.hadoop.hbase.client.ResultScanner;
-053import 
org.apache.hadoop.hbase.client.Scan;
-054import 
org.apache.hadoop.hbase.client.Table;
-055import 
org.apache.hadoop.hbase.client.TableState;
-056import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-057import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-058import 
org.apache.hadoop.hbase.master.RegionState;
-059import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-060import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-061import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
-062import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-063import 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
-064import 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
-065import 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse;
-066import 
org.apache.hadoop.hbase.util.Bytes;
-067import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-068import 
org.apache.hadoop.hbase.util.ExceptionUtil;
-069import 
org.apache.hadoop.hbase.util.Pair;
-070import 
org.apache.hadoop.hbase.util.PairOfSameType;
-071import 
org.apache.yetus.audience.InterfaceAudience;
-072import org.slf4j.Logger;
-073import org.slf4j.LoggerFactory;
-074
-075import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-076
-077/**
-078 * <p>
-079 * Read/write operations on region and 
assignment information stored in <code>hbase:meta</code>.
-080 * </p>
+037import java.util.stream.Collectors;
+038import java.util.stream.Stream;
+039import 
org.apache.hadoop.conf.Configuration;
+040import 
org.apache.hadoop.hbase.Cell.Type;
+041import 
org.apache.hadoop.hbase.client.Connection;
+042import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+043import 
org.apache.hadoop.hbase.client.Consistency;
+044import 
org.apache.hadoop.hbase.client.Delete;
+045import 
org.apache.hadoop.hbase.client.Get;
+046import 
org.apache.hadoop.hbase.client.Mutation;
+047import 
org.apache.hadoop.hbase.client.Put;
+048import 
org.apache.hadoop.hbase.client.RegionInfo;
+049import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
+050import 
org.apache.hadoop.hbase.client.RegionLocator;
+051import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
+052import 
org.apache.hadoop.hbase.client.RegionServerCallable;
+053import 
org.apache.hadoop.hbase.client.Result;
+054import 
org.apache.hadoop.hbase.client.ResultScanner;
+055import 
org.apache.hadoop.hbase.client.Scan;
+056import 
org.apache.hadoop.hbase.client.Table;
+057import 
org.apache.hadoop.hbase.client.TableState;
+058import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+059import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
+060import 
org.apache.hadoop.hbase.master.RegionState;
+061import 
org.apache.hadoop.hbase.master.RegionState.State;
+062import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+063import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+064import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
+065import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
+066import 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
+067import 
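Everything in this class ultimately reads or mutates the hbase:meta table; a
hedged sketch of the read path using only the public client API (connection is
assumed to be an open Connection; checked exceptions elided):

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.Table;

    try (Table meta = connection.getTable(TableName.META_TABLE_NAME);
         ResultScanner scanner = meta.getScanner(new Scan())) {
      for (Result row : scanner) {
        // each row keys a region; info:regioninfo and info:server hold the
        // serialized RegionInfo and its current location
      }
    }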

[12/51] [partial] hbase-site git commit: Published site at 1d25b60831b8cc8f7ad5fd366f1867de5c20d2f3.

2018-03-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/eb05e3e3/apidocs/org/apache/hadoop/hbase/client/Query.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Query.html 
b/apidocs/org/apache/hadoop/hbase/client/Query.html
index 3a6a0da..beb9848 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Query.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Query.html
@@ -97,7 +97,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
 
 
 org.apache.hadoop.hbase.client.Operation
@@ -150,7 +150,7 @@ extends Field and Description
 
 
-protected http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Map<byte[],TimeRange>
+protected https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Map<byte[],TimeRange>
 colFamTimeRangeMap
 
 
@@ -162,7 +162,7 @@ extends filter
 
 
-protected http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+protected https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
 loadColumnFamiliesOnDemand
 
 
@@ -223,7 +223,7 @@ extends getAuthorizations()
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Map<byte[],TimeRange>
+https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Map<byte[],TimeRange>
 getColumnFamilyTimeRange()
 
 
@@ -241,7 +241,7 @@ extends getIsolationLevel()
 
 
-http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
 getLoadColumnFamiliesOnDemandValue()
 Get the raw loadColumnFamiliesOnDemand setting; if it's not 
set, can be null.
 
@@ -254,11 +254,11 @@ extends 
 Query
-setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">String,org.apache.hadoop.hbase.security.access.Permissionperms)
+setACL(https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">String,org.apache.hadoop.hbase.security.access.Permissionperms)
 
 
 Query
-setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringuser,
+setACL(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringuser,
   
org.apache.hadoop.hbase.security.access.Permissionperms)
 
 
@@ -326,8 +326,8 @@ extends 
 
 
-Methods inherited from classjava.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang
 /Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
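The Query members listed above surface on its concrete subclasses such as Scan
and Get; a hedged sketch (family name and time range are illustrative):

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    Scan scan = new Scan();
    scan.setColumnFamilyTimeRange(Bytes.toBytes("f"), 0L, System.currentTimeMillis());
    Boolean onDemand = scan.getLoadColumnFamiliesOnDemandValue(); // null until explicitly set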
 

[12/51] [partial] hbase-site git commit: Published site at .

2018-02-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
index 3152619..27db368 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
@@ -39,29 +39,29 @@
 031import 
java.util.concurrent.ExecutorService;
 032import 
java.util.concurrent.ThreadLocalRandom;
 033import java.util.concurrent.TimeUnit;
-034
-035import 
org.apache.hadoop.conf.Configuration;
-036import org.apache.hadoop.hbase.Cell;
-037import 
org.apache.hadoop.hbase.CellComparator;
-038import 
org.apache.hadoop.hbase.HConstants;
-039import 
org.apache.hadoop.hbase.MasterNotRunningException;
-040import 
org.apache.hadoop.hbase.PrivateCellUtil;
-041import 
org.apache.hadoop.hbase.ServerName;
-042import 
org.apache.hadoop.hbase.TableName;
-043import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-044import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-045import 
org.apache.hadoop.hbase.security.User;
-046import 
org.apache.hadoop.hbase.security.UserProvider;
-047import 
org.apache.hadoop.hbase.util.Bytes;
-048import 
org.apache.hadoop.hbase.util.ReflectionUtils;
-049import 
org.apache.hadoop.ipc.RemoteException;
-050import org.apache.hadoop.net.DNS;
-051import 
org.apache.yetus.audience.InterfaceAudience;
-052import org.slf4j.Logger;
-053import org.slf4j.LoggerFactory;
-054import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-055import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-056import 
org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
+034import 
org.apache.hadoop.conf.Configuration;
+035import org.apache.hadoop.hbase.Cell;
+036import 
org.apache.hadoop.hbase.CellComparator;
+037import 
org.apache.hadoop.hbase.HConstants;
+038import 
org.apache.hadoop.hbase.PrivateCellUtil;
+039import 
org.apache.hadoop.hbase.ServerName;
+040import 
org.apache.hadoop.hbase.TableName;
+041import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+042import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+043import 
org.apache.hadoop.hbase.security.User;
+044import 
org.apache.hadoop.hbase.security.UserProvider;
+045import 
org.apache.hadoop.hbase.util.Bytes;
+046import 
org.apache.hadoop.hbase.util.ReflectionUtils;
+047import 
org.apache.hadoop.ipc.RemoteException;
+048import org.apache.hadoop.net.DNS;
+049import 
org.apache.yetus.audience.InterfaceAudience;
+050import org.slf4j.Logger;
+051import org.slf4j.LoggerFactory;
+052
+053import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+054import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+055import 
org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
+056
 057import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
 058import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 059import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
@@ -163,11 +163,11 @@
 155}
 156
 157@Override
-158public MasterKeepAliveConnection 
getKeepAliveMasterService() throws MasterNotRunningException {
+158public MasterKeepAliveConnection 
getMaster() throws IOException {
 159  if (this.localHostClient instanceof 
MasterService.BlockingInterface) {
 160return new 
ShortCircuitMasterConnection((MasterService.BlockingInterface)this.localHostClient);
 161  }
-162  return 
super.getKeepAliveMasterService();
+162  return super.getMaster();
 163}
 164  }
 165

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ec8bf761/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
index 3152619..27db368 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.ShortCircuitingClusterConnection.html
@@ -39,29 +39,29 @@
 031import 
java.util.concurrent.ExecutorService;
 032import 
java.util.concurrent.ThreadLocalRandom;
 033import java.util.concurrent.TimeUnit;
-034
-035import 
org.apache.hadoop.conf.Configuration;
-036import org.apache.hadoop.hbase.Cell;
-037import 

[12/51] [partial] hbase-site git commit: Published site at .

2018-02-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/991224b9/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
index 802b925..a3e80ab 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
@@ -73,229 +73,229 @@
 065import 
java.util.concurrent.TimeoutException;
 066import 
java.util.concurrent.atomic.AtomicBoolean;
 067import 
java.util.concurrent.atomic.AtomicInteger;
-068import 
java.util.concurrent.atomic.AtomicLong;
-069import 
java.util.concurrent.atomic.LongAdder;
-070import java.util.concurrent.locks.Lock;
-071import 
java.util.concurrent.locks.ReadWriteLock;
-072import 
java.util.concurrent.locks.ReentrantReadWriteLock;
-073import java.util.function.Function;
-074import 
org.apache.hadoop.conf.Configuration;
-075import org.apache.hadoop.fs.FileStatus;
-076import org.apache.hadoop.fs.FileSystem;
-077import 
org.apache.hadoop.fs.LocatedFileStatus;
-078import org.apache.hadoop.fs.Path;
-079import org.apache.hadoop.hbase.Cell;
-080import 
org.apache.hadoop.hbase.CellBuilderType;
-081import 
org.apache.hadoop.hbase.CellComparator;
-082import 
org.apache.hadoop.hbase.CellComparatorImpl;
-083import 
org.apache.hadoop.hbase.CellScanner;
-084import 
org.apache.hadoop.hbase.CellUtil;
-085import 
org.apache.hadoop.hbase.CompareOperator;
-086import 
org.apache.hadoop.hbase.CompoundConfiguration;
-087import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-088import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-089import 
org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
-090import 
org.apache.hadoop.hbase.HConstants;
-091import 
org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-092import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-093import 
org.apache.hadoop.hbase.HRegionInfo;
-094import 
org.apache.hadoop.hbase.KeyValue;
-095import 
org.apache.hadoop.hbase.KeyValueUtil;
-096import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-097import 
org.apache.hadoop.hbase.NotServingRegionException;
-098import 
org.apache.hadoop.hbase.PrivateCellUtil;
-099import 
org.apache.hadoop.hbase.RegionTooBusyException;
-100import 
org.apache.hadoop.hbase.TableName;
-101import org.apache.hadoop.hbase.Tag;
-102import org.apache.hadoop.hbase.TagUtil;
-103import 
org.apache.hadoop.hbase.UnknownScannerException;
-104import 
org.apache.hadoop.hbase.client.Append;
-105import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-106import 
org.apache.hadoop.hbase.client.CompactionState;
-107import 
org.apache.hadoop.hbase.client.Delete;
-108import 
org.apache.hadoop.hbase.client.Durability;
-109import 
org.apache.hadoop.hbase.client.Get;
-110import 
org.apache.hadoop.hbase.client.Increment;
-111import 
org.apache.hadoop.hbase.client.IsolationLevel;
-112import 
org.apache.hadoop.hbase.client.Mutation;
-113import 
org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
-114import 
org.apache.hadoop.hbase.client.Put;
-115import 
org.apache.hadoop.hbase.client.RegionInfo;
-116import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-117import 
org.apache.hadoop.hbase.client.Result;
-118import 
org.apache.hadoop.hbase.client.RowMutations;
-119import 
org.apache.hadoop.hbase.client.Scan;
-120import 
org.apache.hadoop.hbase.client.TableDescriptor;
-121import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-122import 
org.apache.hadoop.hbase.conf.ConfigurationManager;
-123import 
org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
-124import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-125import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-126import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-127import 
org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-128import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-129import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-130import 
org.apache.hadoop.hbase.filter.FilterWrapper;
-131import 
org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-132import 
org.apache.hadoop.hbase.io.HFileLink;
-133import 
org.apache.hadoop.hbase.io.HeapSize;
-134import 
org.apache.hadoop.hbase.io.TimeRange;
-135import 
org.apache.hadoop.hbase.io.hfile.HFile;
-136import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
-137import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-138import 
org.apache.hadoop.hbase.ipc.RpcCall;
-139import 
org.apache.hadoop.hbase.ipc.RpcServer;
-140import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import 

[12/51] [partial] hbase-site git commit: Published site at .

2018-02-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/193b4259/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index bd13b53..802b925 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
@@ -900,7600 +900,7598 @@
 892if 
(this.getRegionInfo().getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {
 893  status.setStatus("Writing region 
info on filesystem");
 894  fs.checkRegionInfoOnFilesystem();
-895} else {
-896  if (LOG.isDebugEnabled()) {
-897LOG.debug("Skipping creation of 
.regioninfo file for " + this.getRegionInfo());
-898  }
-899}
-900
-901// Initialize all the HStores
-902status.setStatus("Initializing all 
the Stores");
-903long maxSeqId = 
initializeStores(reporter, status);
-904this.mvcc.advanceTo(maxSeqId);
-905if 
(ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
-906  Collection<HStore> stores = this.stores.values();
-907  try {
-908// update the stores that we are 
replaying
-909
stores.forEach(HStore::startReplayingFromWAL);
-910// Recover any edits if 
available.
-911maxSeqId = Math.max(maxSeqId,
-912  
replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, 
status));
-913// Make sure mvcc is up to max.
-914this.mvcc.advanceTo(maxSeqId);
-915  } finally {
-916// update the stores that we are 
done replaying
-917
stores.forEach(HStore::stopReplayingFromWAL);
-918  }
-919}
-920this.lastReplayedOpenRegionSeqId = 
maxSeqId;
+895}
+896
+897// Initialize all the HStores
+898status.setStatus("Initializing all 
the Stores");
+899long maxSeqId = 
initializeStores(reporter, status);
+900this.mvcc.advanceTo(maxSeqId);
+901if 
(ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this)) {
+902  Collection<HStore> stores = this.stores.values();
+903  try {
+904// update the stores that we are 
replaying
+905
stores.forEach(HStore::startReplayingFromWAL);
+906// Recover any edits if 
available.
+907maxSeqId = Math.max(maxSeqId,
+908  
replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, 
status));
+909// Make sure mvcc is up to max.
+910this.mvcc.advanceTo(maxSeqId);
+911  } finally {
+912// update the stores that we are 
done replaying
+913
stores.forEach(HStore::stopReplayingFromWAL);
+914  }
+915}
+916this.lastReplayedOpenRegionSeqId = 
maxSeqId;
+917
+918
this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
+919this.writestate.flushRequested = 
false;
+920this.writestate.compacting.set(0);
 921
-922
this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
-923this.writestate.flushRequested = 
false;
-924this.writestate.compacting.set(0);
-925
-926if (this.writestate.writesEnabled) 
{
-927  // Remove temporary data left over 
from old regions
-928  status.setStatus("Cleaning up 
temporary data from old regions");
-929  fs.cleanupTempDir();
-930}
-931
-932if (this.writestate.writesEnabled) 
{
-933  status.setStatus("Cleaning up 
detritus from prior splits");
-934  // Get rid of any splits or merges 
that were lost in-progress.  Clean out
-935  // these directories here on open.  
We may be opening a region that was
-936  // being split but we crashed in 
the middle of it all.
-937  fs.cleanupAnySplitDetritus();
-938  fs.cleanupMergesDir();
-939}
-940
-941// Initialize split policy
-942this.splitPolicy = 
RegionSplitPolicy.create(this, conf);
-943
-944// Initialize flush policy
-945this.flushPolicy = 
FlushPolicyFactory.create(this, conf);
-946
-947long lastFlushTime = 
EnvironmentEdgeManager.currentTime();
-948for (HStore store: stores.values()) 
{
-949  
this.lastStoreFlushTimeMap.put(store, lastFlushTime);
-950}
-951
-952// Use maximum of log sequenceid or 
that which was found in stores
-953// (particularly if no recovered 
edits, seqid will be -1).
-954long nextSeqid = maxSeqId;
-955if (this.writestate.writesEnabled) 
{
-956  nextSeqid = 
WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(),
-957  this.fs.getRegionDir(), 
nextSeqid, 1);
-958} else {
-959  nextSeqid++;
-960}
-961
-962LOG.info("Onlined " + 
this.getRegionInfo().getShortNameToLog() +
-963  "; next sequenceid=" + 
nextSeqid);
+922if (this.writestate.writesEnabled) 
{
+923  
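Aside from dropping the debug-log else branch, the hunk above keeps the region-open replay protocol intact: every HStore is switched into replay mode before recovered edits are applied, and switched back in a finally block so a failed replay cannot leave a store stuck replaying. A minimal sketch of that bracket, using a simplified Store interface as a stand-in for HStore:

  // Simplified stand-in for HStore's replay-mode switch.
  interface Store {
    void startReplayingFromWAL();
    void stopReplayingFromWAL();
  }

  final class ReplayBracketSketch {
    // Mirrors the try/finally shape above: doReplay() stands in for
    // replayRecoveredEditsIfAny(...), and the max() keeps the sequence
    // id monotonic, exactly what mvcc.advanceTo(maxSeqId) requires.
    static long replayAll(Iterable<Store> stores, long maxSeqId) {
      for (Store s : stores) {
        s.startReplayingFromWAL();
      }
      try {
        return Math.max(maxSeqId, doReplay());
      } finally {
        for (Store s : stores) {
          s.stopReplayingFromWAL(); // always restore normal ingest mode
        }
      }
    }

    private static long doReplay() {
      return 0L; // placeholder for reading recovered.edits files
    }
  }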

[12/51] [partial] hbase-site git commit: Published site at .

2018-02-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/94208cfe/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
index bc89c2d..aa1df59 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
@@ -576,7 +576,7 @@ extends 
 
 EXPECTED_SPLIT_STATES
-private staticRegionState.State[] EXPECTED_SPLIT_STATES
+private staticRegionState.State[] EXPECTED_SPLIT_STATES
 
 
 
@@ -686,7 +686,7 @@ extends 
 
 rollbackState
-protectedvoidrollbackState(MasterProcedureEnvenv,
+protectedvoidrollbackState(MasterProcedureEnvenv,
  
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionStatestate)
   throws IOException, InterruptedException
@@ -710,7 +710,7 @@ extends 
 
 isRollbackSupported
-protectedbooleanisRollbackSupported(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionStatestate)
+protectedbooleanisRollbackSupported(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionStatestate)
 Description copied from 
class:StateMachineProcedure
 Used by the default implementation of abort() to know if 
the current state can be aborted
  and rollback can be triggered.
@@ -726,7 +726,7 @@ extends 
 
 getState
-protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionStategetState(intstateId)
+protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionStategetState(intstateId)
 Description copied from 
class:StateMachineProcedure
 Convert an ordinal (or state id) to an Enum (or more 
descriptive) state object.
 
@@ -745,7 +745,7 @@ extends 
 
 getStateId
-protectedintgetStateId(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionStatestate)
+protectedintgetStateId(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionStatestate)
 Description copied from 
class:StateMachineProcedure
 Convert the Enum (or more descriptive) state object to an 
ordinal (or state id).
 
@@ -764,7 +764,7 @@ extends 
 
 getInitialState
-protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionStategetInitialState()
+protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionStategetInitialState()
 Description copied from 
class:StateMachineProcedure
 Return the initial state object that will be used for the 
first call to executeFromState().
 
@@ -781,7 +781,7 @@ extends 
 
 serializeStateData
-protectedvoidserializeStateData(ProcedureStateSerializerserializer)
+protectedvoidserializeStateData(ProcedureStateSerializerserializer)
    throws IOException
 Description copied from 
class:Procedure
 The user-level code of the procedure may have some state to
@@ -803,7 +803,7 @@ extends 
 
 deserializeStateData
-protectedvoiddeserializeStateData(ProcedureStateSerializerserializer)
+protectedvoiddeserializeStateData(ProcedureStateSerializerserializer)
   throws IOException
 Description copied from 
class:Procedure
 Called on store load to allow the user to decode the 
previously serialized
@@ -824,7 +824,7 @@ extends 
 
 toStringClassDetails
-public void toStringClassDetails(StringBuilder sb)
+public void toStringClassDetails(StringBuilder sb)
 Description copied from 
class:Procedure
 Extend the toString() information with the procedure details
  e.g. className and parameters
@@ -842,7 +842,7 @@ extends 
 
 getParentRegion
-privateRegionInfogetParentRegion()
+privateRegionInfogetParentRegion()
 
 
 
@@ -851,7 +851,7 @@ extends 
 
 getTableOperationType
-publicTableProcedureInterface.TableOperationTypegetTableOperationType()
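Behind this method listing is the standard StateMachineProcedure contract: a two-way mapping between integer state ids and enum states, plus a designated initial state for the first executeFromState() call. A hedged sketch with a hypothetical enum (the real class converts MasterProcedureProtos.SplitTableRegionState values):

  // Hypothetical stand-in for the generated protobuf state enum.
  enum SplitState { PREPARE, EXECUTE, FINISH }

  abstract class StateMappingSketch {
    // ordinal (state id) -> enum, as in getState(int)
    protected SplitState getState(int stateId) {
      return SplitState.values()[stateId];
    }

    // enum -> ordinal (state id), as in getStateId(state)
    protected int getStateId(SplitState state) {
      return state.ordinal();
    }

    // first state handed to executeFromState(), as in getInitialState()
    protected SplitState getInitialState() {
      return SplitState.PREPARE;
    }
  }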

[12/51] [partial] hbase-site git commit: Published site at .

2018-02-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0cd17dc5/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 49f85aa..6e37f0b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -162,11 +162,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableRecordReader.createKey()
+TableSnapshotInputFormat.TableSnapshotRecordReader.createKey()
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormat.TableSnapshotRecordReader.createKey()
+TableRecordReader.createKey()
 
 
 ImmutableBytesWritable
@@ -183,11 +183,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReaderImmutableBytesWritable,Result
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
org.apache.hadoop.mapred.JobConfjob,
-   org.apache.hadoop.mapred.Reporterreporter)
-Builds a TableRecordReader.
-
+   
org.apache.hadoop.mapred.Reporterreporter)
 
 
 org.apache.hadoop.mapred.RecordReaderImmutableBytesWritable,Result
@@ -197,9 +195,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReaderImmutableBytesWritable,Result
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplitsplit,
org.apache.hadoop.mapred.JobConfjob,
-   
org.apache.hadoop.mapred.Reporterreporter)
+   org.apache.hadoop.mapred.Reporterreporter)
+Builds a TableRecordReader.
+
 
 
 
@@ -218,12 +218,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritablekey,
-   Resultvalue,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow,
+   Resultvalues,
org.apache.hadoop.mapred.OutputCollectorImmutableBytesWritable,Resultoutput,
-   org.apache.hadoop.mapred.Reporterreporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporterreporter)
 
 
 void
@@ -236,19 +234,21 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow,
-   Resultvalues,
+IdentityTableMap.map(ImmutableBytesWritablekey,
+   Resultvalue,
org.apache.hadoop.mapred.OutputCollectorImmutableBytesWritable,Resultoutput,
-   org.apache.hadoop.mapred.Reporterreporter)
+   org.apache.hadoop.mapred.Reporterreporter)
+Pass the key, value to reduce
+
 
 
 boolean
-TableRecordReader.next(ImmutableBytesWritablekey,
+TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritablekey,
 Resultvalue)
 
 
 boolean
-TableSnapshotInputFormat.TableSnapshotRecordReader.next(ImmutableBytesWritablekey,
+TableRecordReader.next(ImmutableBytesWritablekey,
 Resultvalue)
 
 
@@ -281,12 +281,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritablekey,
-   Resultvalue,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow,
+   Resultvalues,
org.apache.hadoop.mapred.OutputCollectorImmutableBytesWritable,Resultoutput,
-   org.apache.hadoop.mapred.Reporterreporter)
-Pass the key, value to reduce
-
+   org.apache.hadoop.mapred.Reporterreporter)
 
 
 void
@@ -299,10 +297,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritablerow,
-   Resultvalues,
+IdentityTableMap.map(ImmutableBytesWritablekey,
+   Resultvalue,
org.apache.hadoop.mapred.OutputCollectorImmutableBytesWritable,Resultoutput,
-   org.apache.hadoop.mapred.Reporterreporter)
+   org.apache.hadoop.mapred.Reporterreporter)
+Pass the key, value to reduce
+
 
 
 void
@@ -349,7 +349,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.key
+TableRecordReaderImpl.key
 
 
 private ImmutableBytesWritable
@@ -357,7 +357,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ImmutableBytesWritable
-TableRecordReaderImpl.key
+MultithreadedTableMapper.SubMapRecordReader.key
 
 
 (package private) ImmutableBytesWritable
@@ -427,33 +427,33 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.getCurrentKey()
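Every createKey/next pair indexed above follows the old org.apache.hadoop.mapred reader protocol: the caller allocates the key and value holders once, and next(key, value) refills them in place until it returns false. A small consumption-loop sketch (how the reader is obtained, TableRecordReader versus TableSnapshotInputFormat.TableSnapshotRecordReader, is left out):

  import java.io.IOException;
  import org.apache.hadoop.hbase.client.Result;
  import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
  import org.apache.hadoop.mapred.RecordReader;

  final class ReaderLoopSketch {
    static long countRows(RecordReader<ImmutableBytesWritable, Result> reader)
        throws IOException {
      ImmutableBytesWritable key = reader.createKey(); // allocated once
      Result value = reader.createValue();             // refilled by next()
      long rows = 0;
      while (reader.next(key, value)) {
        rows++; // key now holds the row, value the row's cells
      }
      return rows;
    }
  }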

[12/51] [partial] hbase-site git commit: Published site at .

2018-02-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
index c7d05d1..abcb738 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
@@ -143,18 +143,18 @@
 
 
 void
-HFileDataBlockEncoderImpl.saveMetadata(HFile.Writerwriter)
-
-
-void
 NoOpDataBlockEncoder.saveMetadata(HFile.Writerwriter)
 
-
+
 void
 HFileDataBlockEncoder.saveMetadata(HFile.Writerwriter)
 Save metadata in HFile which will be written to disk
 
 
+
+void
+HFileDataBlockEncoderImpl.saveMetadata(HFile.Writerwriter)
+
 
 
 
@@ -203,18 +203,18 @@
 
 
 
-void
-RowColBloomContext.addLastBloomKey(HFile.Writerwriter)
+abstract void
+BloomContext.addLastBloomKey(HFile.Writerwriter)
+Adds the last bloom key to the HFile Writer as part of 
StorefileWriter close.
+
 
 
 void
 RowBloomContext.addLastBloomKey(HFile.Writerwriter)
 
 
-abstract void
-BloomContext.addLastBloomKey(HFile.Writerwriter)
-Adds the last bloom key to the HFile Writer as part of 
StorefileWriter close.
-
+void
+RowColBloomContext.addLastBloomKey(HFile.Writerwriter)
 
 
 static BloomFilterWriter

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
index 479b9d3..274bfad 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
@@ -106,15 +106,15 @@
 
 
 
+private HFileBlock.Writer
+HFileBlockIndex.BlockIndexWriter.blockWriter
+
+
 protected HFileBlock.Writer
 HFileWriterImpl.blockWriter
 block writer
 
 
-
-private HFileBlock.Writer
-HFileBlockIndex.BlockIndexWriter.blockWriter
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/828486ae/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
index 0c892c8..b293c97 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
@@ -136,15 +136,15 @@
 
 
 HFileContext
-HFileBlockDecodingContext.getHFileContext()
+HFileBlockEncodingContext.getHFileContext()
 
 
 HFileContext
-HFileBlockDefaultDecodingContext.getHFileContext()
+HFileBlockDecodingContext.getHFileContext()
 
 
 HFileContext
-HFileBlockEncodingContext.getHFileContext()
+HFileBlockDefaultDecodingContext.getHFileContext()
 
 
 HFileContext
@@ -224,23 +224,23 @@
 
 
 private HFileContext
+HFile.WriterFactory.fileContext
+
+
+private HFileContext
 HFileBlock.fileContext
 Meta data that holds meta information on the 
hfileblock.
 
 
-
+
 private HFileContext
 HFileBlock.Writer.fileContext
 Meta data that holds information about the hfileblock
 
 
-
-private HFileContext
-HFileBlock.FSReaderImpl.fileContext
-
 
 private HFileContext
-HFile.WriterFactory.fileContext
+HFileBlock.FSReaderImpl.fileContext
 
 
 private HFileContext
@@ -277,20 +277,20 @@
 
 
 HFileContext
-HFileWriterImpl.getFileContext()
-
-
-HFileContext
 HFile.Writer.getFileContext()
 Return the file context for the HFile this writer belongs 
to
 
 
-
+
 HFileContext
 HFile.Reader.getFileContext()
 Return the file context of the HFile this reader belongs 
to
 
 
+
+HFileContext
+HFileWriterImpl.getFileContext()
+
 
 HFileContext
 HFileReaderImpl.getFileContext()
@@ -323,35 +323,35 @@
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
-
-
-HFileBlockDecodingContext
 NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContextmeta)
 
-
+
 HFileBlockDecodingContext
 HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContextfileContext)
 create an encoder-specific decoding context for reading.
 
 
-
-HFileBlockEncodingContext
-HFileDataBlockEncoderImpl.newDataBlockEncodingContext(byte[]dummyHeader,
-   HFileContextfileContext)
-
 
+HFileBlockDecodingContext
+HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
+
+
 HFileBlockEncodingContext
 NoOpDataBlockEncoder.newDataBlockEncodingContext(byte[]dummyHeader,
HFileContextmeta)
 
-
+
 HFileBlockEncodingContext
 

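The shape of this index is symmetric: one HFileContext describes the block layout, and each data block encoder can mint both an encoding context for writing (seeded with a dummy header) and a decoding context for reading from it. A purely type-level sketch of that pairing, with stand-in type parameters rather than the real HBase classes:

  // Stand-in shape for the HFileDataBlockEncoder hooks listed above;
  // ENC/DEC/CTX are placeholders, not the real HBase types.
  interface BlockCodecSketch<ENC, DEC, CTX> {
    ENC newDataBlockEncodingContext(byte[] dummyHeader, CTX fileContext);
    DEC newDataBlockDecodingContext(CTX fileContext);
  }
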
[12/51] [partial] hbase-site git commit: Published site at .

2018-02-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f272b0e8/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 0c342b2..bb2794a 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -151,115 +151,115 @@
 
 
 Filter.ReturnCode
-FilterListWithAND.filterCell(Cellc)
+ColumnPrefixFilter.filterCell(Cellcell)
 
 
 Filter.ReturnCode
-ValueFilter.filterCell(Cellc)
+ColumnCountGetFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-SkipFilter.filterCell(Cellc)
+RowFilter.filterCell(Cellv)
 
 
 Filter.ReturnCode
-FamilyFilter.filterCell(Cellc)
+FuzzyRowFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterCell(Cellcell)
+Filter.filterCell(Cellc)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-PageFilter.filterCell(Cellignored)
+RandomRowFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-RowFilter.filterCell(Cellv)
+FirstKeyOnlyFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterCell(Cellc)
+SkipFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterCell(Cellc)
+TimestampsFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterCell(Cellc)
+ValueFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterCell(Cellc)
+KeyOnlyFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterCell(Cellc)
+FamilyFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FilterListWithOR.filterCell(Cellc)
+QualifierFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterCell(Cellc)
+FilterList.filterCell(Cellc)
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterCell(Cellignored)
+ColumnRangeFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterCell(Cellignored)
+ColumnPaginationFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-Filter.filterCell(Cellc)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+FilterListWithAND.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterCell(Cellc)
+WhileMatchFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterCell(Cellc)
+MultiRowRangeFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterCell(Cellc)
-Deprecated.
-
+PrefixFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-TimestampsFilter.filterCell(Cellc)
+DependentColumnFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterCell(Cellc)
+FirstKeyValueMatchingQualifiersFilter.filterCell(Cellc)
+Deprecated.
+
 
 
 Filter.ReturnCode
-FilterList.filterCell(Cellc)
+PageFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-RandomRowFilter.filterCell(Cellc)
+FilterListWithOR.filterCell(Cellc)
 
 
 Filter.ReturnCode
-PrefixFilter.filterCell(Cellc)
+InclusiveStopFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterCell(Cellc)
+MultipleColumnPrefixFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-QualifierFilter.filterCell(Cellc)
+SingleColumnValueFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
@@ -275,158 +275,158 @@
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cellc)
+ColumnPrefixFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cellc)
+ColumnCountGetFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-FilterListBase.filterKeyValue(Cellc)
+RowFilter.filterKeyValue(Cellc)
+Deprecated.
+
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cellc)
+FuzzyRowFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cellc)
-Deprecated.
+Filter.filterKeyValue(Cellc)
+Deprecated.
+As of release 2.0.0, this 
will be removed in HBase 3.0.0.
+ Instead use filterCell(Cell)
+
 
 
 
 Filter.ReturnCode
-PageFilter.filterKeyValue(Cellc)
+RandomRowFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cellc)
+FirstKeyOnlyFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cellc)
+SkipFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cellc)
+TimestampsFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cellc)
+ValueFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cellc)
+KeyOnlyFilter.filterKeyValue(Cellignored)
 Deprecated.
 
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cellc)
+FamilyFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 

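The reshuffled rows above all document the same HBase 2.0 API migration: filterKeyValue(Cell) is deprecated and filterCell(Cell) is its replacement, with removal planned for 3.0.0. A hedged sketch of porting a custom filter (the length-based rule is invented purely for illustration):

  import org.apache.hadoop.hbase.Cell;
  import org.apache.hadoop.hbase.filter.FilterBase;

  // Hypothetical custom filter on the 2.0 API: the per-cell decision
  // lives in filterCell(Cell) instead of the deprecated filterKeyValue.
  public class ShortValueFilter extends FilterBase {
    @Override
    public ReturnCode filterCell(Cell c) {
      // invented rule: keep cells whose value is under 16 bytes
      return c.getValueLength() < 16 ? ReturnCode.INCLUDE : ReturnCode.SKIP;
    }
  }

Existing 1.x-style filters keep working through 2.x because the base filterCell delegates to filterKeyValue, but new code should override filterCell directly.
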
[12/51] [partial] hbase-site git commit: Published site at .

2018-02-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
index c7d05d1..abcb738 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
@@ -143,18 +143,18 @@
 
 
 void
-HFileDataBlockEncoderImpl.saveMetadata(HFile.Writerwriter)
-
-
-void
 NoOpDataBlockEncoder.saveMetadata(HFile.Writerwriter)
 
-
+
 void
 HFileDataBlockEncoder.saveMetadata(HFile.Writerwriter)
 Save metadata in HFile which will be written to disk
 
 
+
+void
+HFileDataBlockEncoderImpl.saveMetadata(HFile.Writerwriter)
+
 
 
 
@@ -203,18 +203,18 @@
 
 
 
-void
-RowColBloomContext.addLastBloomKey(HFile.Writerwriter)
+abstract void
+BloomContext.addLastBloomKey(HFile.Writerwriter)
+Adds the last bloom key to the HFile Writer as part of 
StorefileWriter close.
+
 
 
 void
 RowBloomContext.addLastBloomKey(HFile.Writerwriter)
 
 
-abstract void
-BloomContext.addLastBloomKey(HFile.Writerwriter)
-Adds the last bloom key to the HFile Writer as part of 
StorefileWriter close.
-
+void
+RowColBloomContext.addLastBloomKey(HFile.Writerwriter)
 
 
 static BloomFilterWriter

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
index 479b9d3..274bfad 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
@@ -106,15 +106,15 @@
 
 
 
+private HFileBlock.Writer
+HFileBlockIndex.BlockIndexWriter.blockWriter
+
+
 protected HFileBlock.Writer
 HFileWriterImpl.blockWriter
 block writer
 
 
-
-private HFileBlock.Writer
-HFileBlockIndex.BlockIndexWriter.blockWriter
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c83a37c8/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
index 0c892c8..b293c97 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
@@ -136,15 +136,15 @@
 
 
 HFileContext
-HFileBlockDecodingContext.getHFileContext()
+HFileBlockEncodingContext.getHFileContext()
 
 
 HFileContext
-HFileBlockDefaultDecodingContext.getHFileContext()
+HFileBlockDecodingContext.getHFileContext()
 
 
 HFileContext
-HFileBlockEncodingContext.getHFileContext()
+HFileBlockDefaultDecodingContext.getHFileContext()
 
 
 HFileContext
@@ -224,23 +224,23 @@
 
 
 private HFileContext
+HFile.WriterFactory.fileContext
+
+
+private HFileContext
 HFileBlock.fileContext
 Meta data that holds meta information on the 
hfileblock.
 
 
-
+
 private HFileContext
 HFileBlock.Writer.fileContext
 Meta data that holds information about the hfileblock
 
 
-
-private HFileContext
-HFileBlock.FSReaderImpl.fileContext
-
 
 private HFileContext
-HFile.WriterFactory.fileContext
+HFileBlock.FSReaderImpl.fileContext
 
 
 private HFileContext
@@ -277,20 +277,20 @@
 
 
 HFileContext
-HFileWriterImpl.getFileContext()
-
-
-HFileContext
 HFile.Writer.getFileContext()
 Return the file context for the HFile this writer belongs 
to
 
 
-
+
 HFileContext
 HFile.Reader.getFileContext()
 Return the file context of the HFile this reader belongs 
to
 
 
+
+HFileContext
+HFileWriterImpl.getFileContext()
+
 
 HFileContext
 HFileReaderImpl.getFileContext()
@@ -323,35 +323,35 @@
 
 
 HFileBlockDecodingContext
-HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
-
-
-HFileBlockDecodingContext
 NoOpDataBlockEncoder.newDataBlockDecodingContext(HFileContextmeta)
 
-
+
 HFileBlockDecodingContext
 HFileDataBlockEncoder.newDataBlockDecodingContext(HFileContextfileContext)
 create an encoder-specific decoding context for reading.
 
 
-
-HFileBlockEncodingContext
-HFileDataBlockEncoderImpl.newDataBlockEncodingContext(byte[]dummyHeader,
-   HFileContextfileContext)
-
 
+HFileBlockDecodingContext
+HFileDataBlockEncoderImpl.newDataBlockDecodingContext(HFileContextfileContext)
+
+
 HFileBlockEncodingContext
 NoOpDataBlockEncoder.newDataBlockEncodingContext(byte[]dummyHeader,
HFileContextmeta)
 
-
+
 HFileBlockEncodingContext
 

[12/51] [partial] hbase-site git commit: Published site at .

2018-02-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
index 7244ce2..5f7ce59 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
@@ -114,15 +114,15 @@
 
 
 private PriorityFunction
-SimpleRpcScheduler.priority
+RpcExecutor.priority
 
 
 private PriorityFunction
-RpcExecutor.priority
+RpcExecutor.CallPriorityComparator.priority
 
 
 private PriorityFunction
-RpcExecutor.CallPriorityComparator.priority
+SimpleRpcScheduler.priority
 
 
 
@@ -319,7 +319,7 @@
 
 
 RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
+FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
   PriorityFunctionpriority)
 Deprecated.
 
@@ -333,18 +333,16 @@
 
 
 RpcScheduler
-FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
   PriorityFunctionpriority)
 Deprecated.
 
 
 
 RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
+FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
   PriorityFunctionpriority,
-  Abortableserver)
-Constructs an RpcScheduler.
-
+  Abortableserver)
 
 
 RpcScheduler
@@ -354,9 +352,11 @@
 
 
 RpcScheduler
-FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configurationconf,
   PriorityFunctionpriority,
-  Abortableserver)
+  Abortableserver)
+Constructs an RpcScheduler.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
index 4a25f5c..6d59fb7 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcCallback.html
@@ -123,14 +123,14 @@
 
 
 void
-ServerCall.setCallBack(RpcCallbackcallback)
-
-
-void
 RpcCallContext.setCallBack(RpcCallbackcallback)
 Sets a callback which has to be executed at the end of this 
RPC call.
 
 
+
+void
+ServerCall.setCallBack(RpcCallbackcallback)
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
index fab4d7a..baa4e5e 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcControllerFactory.html
@@ -131,32 +131,24 @@
 
 
 
-private RpcControllerFactory
-ConnectionImplementation.rpcControllerFactory
-
-
-protected RpcControllerFactory
-ClientScanner.rpcControllerFactory
-
-
 protected RpcControllerFactory
 RegionAdminServiceCallable.rpcControllerFactory
 
 
-(package private) RpcControllerFactory
-AsyncConnectionImpl.rpcControllerFactory
+private RpcControllerFactory
+ConnectionImplementation.rpcControllerFactory
 
 
-private RpcControllerFactory
-HTable.rpcControllerFactory
+(package private) RpcControllerFactory
+AsyncConnectionImpl.rpcControllerFactory
 
 
 private RpcControllerFactory
-HBaseAdmin.rpcControllerFactory
+HTable.rpcControllerFactory
 
 
 private RpcControllerFactory
-SecureBulkLoadClient.rpcControllerFactory
+RpcRetryingCallerWithReadReplicas.rpcControllerFactory
 
 
 protected RpcControllerFactory
@@ -164,7 +156,15 @@
 
 
 private RpcControllerFactory
-RpcRetryingCallerWithReadReplicas.rpcControllerFactory
+HBaseAdmin.rpcControllerFactory
+
+
+private RpcControllerFactory
+SecureBulkLoadClient.rpcControllerFactory
+
+
+protected RpcControllerFactory
+ClientScanner.rpcControllerFactory
 
 
 (package private) RpcControllerFactory
@@ -181,11 +181,11 @@
 
 
 RpcControllerFactory
-ConnectionImplementation.getRpcControllerFactory()
+ClusterConnection.getRpcControllerFactory()
 
 
 RpcControllerFactory
-ClusterConnection.getRpcControllerFactory()
+ConnectionImplementation.getRpcControllerFactory()
 
 
 private RpcControllerFactory

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0ab8335e/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcExecutor.Handler.html
--
diff --git 

[12/51] [partial] hbase-site git commit: Published site at .

2018-02-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6674e3ab/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
index 77fb9b5..c4e8c8b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html
@@ -165,520 +165,519 @@
 157E env = 
checkAndLoadInstance(implClass, priority, conf);
 158if (env != null) {
 159  
this.coprocEnvironments.add(env);
-160  LOG.info(
-161  "System coprocessor " + 
className + " was loaded " + "successfully with priority (" + priority + 
").");
-162  ++priority;
-163}
-164  } catch (Throwable t) {
-165// We always abort if system 
coprocessors cannot be loaded
-166abortServer(className, t);
-167  }
-168}
-169  }
-170
-171  /**
-172   * Load a coprocessor implementation 
into the host
-173   * @param path path to implementation 
jar
-174   * @param className the main class 
name
-175   * @param priority chaining priority
-176   * @param conf configuration for 
coprocessor
-177   * @throws java.io.IOException 
Exception
-178   */
-179  public E load(Path path, String 
className, int priority,
-180  Configuration conf) throws 
IOException {
-181String[] includedClassPrefixes = 
null;
-182if 
(conf.get(HConstants.CP_HTD_ATTR_INCLUSION_KEY) != null){
-183  String prefixes = 
conf.get(HConstants.CP_HTD_ATTR_INCLUSION_KEY);
-184  includedClassPrefixes = 
prefixes.split(";");
-185}
-186return load(path, className, 
priority, conf, includedClassPrefixes);
-187  }
-188
-189  /**
-190   * Load a coprocessor implementation 
into the host
-191   * @param path path to implementation 
jar
-192   * @param className the main class 
name
-193   * @param priority chaining priority
-194   * @param conf configuration for 
coprocessor
-195   * @param includedClassPrefixes class 
name prefixes to include
-196   * @throws java.io.IOException 
Exception
-197   */
-198  public E load(Path path, String 
className, int priority,
-199  Configuration conf, String[] 
includedClassPrefixes) throws IOException {
-200Class<?> implClass;
-201LOG.debug("Loading coprocessor class 
" + className + " with path " +
-202path + " and priority " + 
priority);
-203
-204ClassLoader cl = null;
-205if (path == null) {
-206  try {
-207implClass = 
getClass().getClassLoader().loadClass(className);
-208  } catch (ClassNotFoundException e) 
{
-209throw new IOException("No jar 
path specified for " + className);
-210  }
-211} else {
-212  cl = 
CoprocessorClassLoader.getClassLoader(
-213path, 
getClass().getClassLoader(), pathPrefix, conf);
-214  try {
-215implClass = 
((CoprocessorClassLoader)cl).loadClass(className, includedClassPrefixes);
-216  } catch (ClassNotFoundException e) 
{
-217throw new IOException("Cannot 
load external coprocessor class " + className, e);
-218  }
-219}
-220
-221//load custom code for coprocessor
-222Thread currentThread = 
Thread.currentThread();
-223ClassLoader hostClassLoader = 
currentThread.getContextClassLoader();
-224try{
-225  // switch temporarily to the thread 
classloader for custom CP
-226  
currentThread.setContextClassLoader(cl);
-227  E cpInstance = 
checkAndLoadInstance(implClass, priority, conf);
-228  return cpInstance;
-229} finally {
-230  // restore the fresh (host) 
classloader
-231  
currentThread.setContextClassLoader(hostClassLoader);
-232}
-233  }
-234
-235  @VisibleForTesting
-236  public void load(Class<? extends C> implClass, int priority, Configuration conf)
-237  throws IOException {
-238E env = 
checkAndLoadInstance(implClass, priority, conf);
-239coprocEnvironments.add(env);
-240  }
-241
-242  /**
-243   * @param implClass Implementation 
class
-244   * @param priority priority
-245   * @param conf configuration
-246   * @throws java.io.IOException 
Exception
-247   */
-248  public E checkAndLoadInstance(Class<?> implClass, int priority, Configuration conf)
-249  throws IOException {
-250// create the instance
-251C impl;
-252try {
-253  impl = 
checkAndGetInstance(implClass);
-254  if (impl == null) {
-255LOG.error("Cannot load 
coprocessor " + implClass.getSimpleName());
-256return null;
-257  }
-258} catch 
(InstantiationException|IllegalAccessException e) {
-259  throw new IOException(e);
-260}
-261// create the environment
-262E env = createEnvironment(impl, 
priority, loadSequence.incrementAndGet(), conf);
-263assert env instanceof 
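The load() body above uses the classic plugin-loading trick: point the thread's context classloader at the CoprocessorClassLoader while the coprocessor is instantiated, then restore the host classloader in a finally block. The same pattern extracted into a generic helper sketch, independent of the HBase types:

  import java.util.concurrent.Callable;

  final class ContextClassLoaderSwapSketch {
    // Runs action with cl as the context classloader; any classloader-
    // sensitive lookup inside action sees the plugin's loader, and the
    // host loader is always restored, mirroring load() above.
    static <T> T withContextClassLoader(ClassLoader cl, Callable<T> action)
        throws Exception {
      Thread current = Thread.currentThread();
      ClassLoader host = current.getContextClassLoader();
      try {
        current.setContextClassLoader(cl);
        return action.call();
      } finally {
        current.setContextClassLoader(host);
      }
    }
  }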

[12/51] [partial] hbase-site git commit: Published site at .

2018-02-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1f2eeb22/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALKeyValueMapper.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALKeyValueMapper.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALKeyValueMapper.html
index 88c511a..ccdd6a2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALKeyValueMapper.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALKeyValueMapper.html
@@ -113,7 +113,7 @@
 105throws IOException {
 106  try {
 107// skip all other tables
-108if (Bytes.equals(table, 
key.getTablename().getName())) {
+108if (Bytes.equals(table, 
key.getTableName().getName())) {
 109  for (Cell cell : 
value.getCells()) {
 110if 
(WALEdit.isMetaEditFamily(cell)) {
 111  continue;
@@ -153,10 +153,10 @@
 145public void map(WALKey key, WALEdit 
value, Context context)
 146throws IOException {
 147  try {
-148if (tables.isEmpty() || 
tables.containsKey(key.getTablename())) {
+148if (tables.isEmpty() || 
tables.containsKey(key.getTableName())) {
 149  TableName targetTable = 
tables.isEmpty() ?
-150key.getTablename() :
-151
tables.get(key.getTablename());
+150key.getTableName() :
+151
tables.get(key.getTableName());
 152  ImmutableBytesWritable tableOut 
= new ImmutableBytesWritable(targetTable.getName());
 153  Put put = null;
 154  Delete del = null;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1f2eeb22/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALMapper.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALMapper.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALMapper.html
index 88c511a..ccdd6a2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALMapper.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.WALMapper.html
@@ -113,7 +113,7 @@
 105throws IOException {
 106  try {
 107// skip all other tables
-108if (Bytes.equals(table, 
key.getTablename().getName())) {
+108if (Bytes.equals(table, 
key.getTableName().getName())) {
 109  for (Cell cell : 
value.getCells()) {
 110if 
(WALEdit.isMetaEditFamily(cell)) {
 111  continue;
@@ -153,10 +153,10 @@
 145public void map(WALKey key, WALEdit 
value, Context context)
 146throws IOException {
 147  try {
-148if (tables.isEmpty() || 
tables.containsKey(key.getTablename())) {
+148if (tables.isEmpty() || 
tables.containsKey(key.getTableName())) {
 149  TableName targetTable = 
tables.isEmpty() ?
-150key.getTablename() :
-151
tables.get(key.getTablename());
+150key.getTableName() :
+151
tables.get(key.getTableName());
 152  ImmutableBytesWritable tableOut 
= new ImmutableBytesWritable(targetTable.getName());
 153  Put put = null;
 154  Delete del = null;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1f2eeb22/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html
index 88c511a..ccdd6a2 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html
@@ -113,7 +113,7 @@
 105throws IOException {
 106  try {
 107// skip all other tables
-108if (Bytes.equals(table, 
key.getTablename().getName())) {
+108if (Bytes.equals(table, 
key.getTableName().getName())) {
 109  for (Cell cell : 
value.getCells()) {
 110if 
(WALEdit.isMetaEditFamily(cell)) {
 111  continue;
@@ -153,10 +153,10 @@
 145public void map(WALKey key, WALEdit 
value, Context context)
 146throws IOException {
 147  try {
-148if (tables.isEmpty() || 
tables.containsKey(key.getTablename())) {
+148if (tables.isEmpty() || 
tables.containsKey(key.getTableName())) {
 149  TableName targetTable = 
tables.isEmpty() ?
-150key.getTablename() :
-151
tables.get(key.getTablename());
+150key.getTableName() :
+151
tables.get(key.getTableName());
 152  ImmutableBytesWritable tableOut 
= new ImmutableBytesWritable(targetTable.getName());
 153  Put put = null;
 154  Delete del = 
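Both mappers in this change carry the same two edits: WALKey.getTablename() becomes getTableName(), and an edit is forwarded only when the table map is empty (replay everything) or contains the edit's table, possibly remapped to a target table. A stripped-down sketch of just that routing decision, with the surrounding MapReduce plumbing assumed:

  import java.util.Map;
  import org.apache.hadoop.hbase.TableName;

  final class TableRoutingSketch {
    // Returns the table to write to, or null when the edit should be
    // skipped, matching the isEmpty()/containsKey() logic above.
    static TableName resolveTargetTable(Map<TableName, TableName> tables,
        TableName source) {
      if (tables.isEmpty()) {
        return source; // no mapping configured: replay as-is
      }
      return tables.get(source); // null => not selected, skip the edit
    }
  }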

[12/51] [partial] hbase-site git commit: Published site at .

2018-01-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cc6597ec/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableScanMetrics.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableScanMetrics.html 
b/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableScanMetrics.html
index 2f1d88a..2604e6c 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableScanMetrics.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableScanMetrics.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestAsyncTableScanMetrics
+public class TestAsyncTableScanMetrics
 extends Object
 
 
@@ -153,34 +153,38 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 CF
 
 
+static HBaseClassTestRule
+CLASS_RULE
+
+
 private static 
org.apache.hadoop.hbase.client.AsyncConnection
 CONN
 
-
+
 private static byte[]
 CQ
 
-
+
 TestAsyncTableScanMetrics.ScanWithMetrics
 method
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 methodName
 
-
+
 private static int
 NUM_REGIONS
 
-
+
 private static 
org.apache.hadoop.hbase.TableName
 TABLE_NAME
 
-
+
 private static HBaseTestingUtility
 UTIL
 
-
+
 private static byte[]
 VALUE
 
@@ -270,13 +274,22 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 Field Detail
+
+
+
+
+
+CLASS_RULE
+public static finalHBaseClassTestRule CLASS_RULE
+
+
 
 
 
 
 
 UTIL
-private static finalHBaseTestingUtility UTIL
+private static finalHBaseTestingUtility UTIL
 
 
 
@@ -285,7 +298,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 TABLE_NAME
-private static finalorg.apache.hadoop.hbase.TableName TABLE_NAME
+private static finalorg.apache.hadoop.hbase.TableName TABLE_NAME
 
 
 
@@ -294,7 +307,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 CF
-private static finalbyte[] CF
+private static finalbyte[] CF
 
 
 
@@ -303,7 +316,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 CQ
-private static finalbyte[] CQ
+private static finalbyte[] CQ
 
 
 
@@ -312,7 +325,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 VALUE
-private static finalbyte[] VALUE
+private static finalbyte[] VALUE
 
 
 
@@ -321,7 +334,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 CONN
-private staticorg.apache.hadoop.hbase.client.AsyncConnection CONN
+private staticorg.apache.hadoop.hbase.client.AsyncConnection CONN
 
 
 
@@ -330,7 +343,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 NUM_REGIONS
-private staticint NUM_REGIONS
+private staticint NUM_REGIONS
 
 
 
@@ -339,7 +352,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 methodName
-public String methodName
+public String methodName
 
 
 
@@ -348,7 +361,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 method
-publicTestAsyncTableScanMetrics.ScanWithMetrics 
method
+publicTestAsyncTableScanMetrics.ScanWithMetrics 
method
 
 
 
@@ -365,7 +378,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 TestAsyncTableScanMetrics
-publicTestAsyncTableScanMetrics()
+publicTestAsyncTableScanMetrics()
 
 
 
@@ -382,7 +395,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 params
-public static List<Object[]> params()
+public static List<Object[]> params()
 
 
 
@@ -391,7 +404,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 setUp
-public staticvoidsetUp()
+public staticvoidsetUp()
   throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -405,7 +418,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 tearDown
-public staticvoidtearDown()
+public staticvoidtearDown()
  

[12/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
index 4febd01..ef680de 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
@@ -71,203 +71,203 @@
 063   * @return The new HashMap of RS log 
time stamps after the log roll for this incremental backup.
 064   * @throws IOException exception
 065   */
-066  public HashMap<String, Long> getIncrBackupLogFileMap()
-067  throws IOException {
-068List<String> logList;
-069HashMap<String, Long> newTimestamps;
-070HashMap<String, Long> previousTimestampMins;
-071
-072String savedStartCode = 
readBackupStartCode();
-073
-074// key: tableName
-075// value: 
RegionServer,PreviousTimeStamp
-076HashMap<TableName, HashMap<String, Long>> previousTimestampMap = readLogTimestampMap();
-077
-078previousTimestampMins = 
BackupUtils.getRSLogTimestampMins(previousTimestampMap);
-079
-080if (LOG.isDebugEnabled()) {
-081  LOG.debug("StartCode " + 
savedStartCode + "for backupID " + backupInfo.getBackupId());
-082}
-083// get all new log files from .logs 
and .oldlogs after last TS and before new timestamp
-084if (savedStartCode == null || 
previousTimestampMins == null
-085|| 
previousTimestampMins.isEmpty()) {
-086  throw new IOException(
-087  "Cannot read any previous back 
up timestamps from backup system table. "
-088  + "In order to create an 
incremental backup, at least one full backup is needed.");
-089}
-090
-091LOG.info("Execute roll log procedure 
for incremental backup ...");
-092HashMap<String, String> props = new HashMap<String, String>();
-093props.put("backupRoot", 
backupInfo.getBackupRootDir());
-094
-095try (Admin admin = conn.getAdmin()) 
{
-096  
admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
-097
LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);
-098}
-099newTimestamps = 
readRegionServerLastLogRollResult();
-100
-101logList = 
getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, 
savedStartCode);
-102List<WALItem> logFromSystemTable =
-103
getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, 
getBackupInfo()
-104.getBackupRootDir());
-105logList = 
excludeAlreadyBackedUpWALs(logList, logFromSystemTable);
-106
backupInfo.setIncrBackupFileList(logList);
-107
-108return newTimestamps;
-109  }
-110
-111  /**
-112   * Get list of WAL files eligible for 
incremental backup
+066  public HashMap<String, Long> getIncrBackupLogFileMap() throws IOException {
+067List<String> logList;
+068HashMap<String, Long> newTimestamps;
+069HashMap<String, Long> previousTimestampMins;
+070
+071String savedStartCode = 
readBackupStartCode();
+072
+073// key: tableName
+074// value: 
RegionServer,PreviousTimeStamp
+075HashMap<TableName, HashMap<String, Long>> previousTimestampMap = readLogTimestampMap();
+076
+077previousTimestampMins = 
BackupUtils.getRSLogTimestampMins(previousTimestampMap);
+078
+079if (LOG.isDebugEnabled()) {
+080  LOG.debug("StartCode " + 
savedStartCode + "for backupID " + backupInfo.getBackupId());
+081}
+082// get all new log files from .logs 
and .oldlogs after last TS and before new timestamp
+083if (savedStartCode == null || 
previousTimestampMins == null
+084|| 
previousTimestampMins.isEmpty()) {
+085  throw new IOException(
+086  "Cannot read any previous back 
up timestamps from backup system table. "
+087  + "In order to create an 
incremental backup, at least one full backup is needed.");
+088}
+089
+090LOG.info("Execute roll log procedure 
for incremental backup ...");
+091HashMap<String, String> props = new HashMap<>();
+092props.put("backupRoot", 
backupInfo.getBackupRootDir());
+093
+094try (Admin admin = conn.getAdmin()) 
{
+095  
admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
+096
LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);
+097}
+098newTimestamps = 
readRegionServerLastLogRollResult();
+099
+100logList = 
getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, 
savedStartCode);
+101List<WALItem> logFromSystemTable =
+102
getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, 
getBackupInfo()
+103.getBackupRootDir());
+104logList = 
excludeAlreadyBackedUpWALs(logList, logFromSystemTable);
+105
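Besides the generics cleanup, getIncrBackupLogFileMap() performs the pivotal incremental-backup step: ask the master to roll every region server's WAL so the backup gets a clean upper bound, then diff the before/after log lists. A sketch of just the roll step; the two string constants are assumptions standing in for LogRollMasterProcedureManager's real values:

  import java.io.IOException;
  import java.util.HashMap;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.Connection;

  final class LogRollSketch {
    // Assumed values; the source references
    // LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE / _NAME.
    static final String ROLLLOG_PROCEDURE_SIGNATURE = "rolllog-proc";
    static final String ROLLLOG_PROCEDURE_NAME = "rolllog";

    static void rollAllWals(Connection conn, String backupRootDir)
        throws IOException {
      HashMap<String, String> props = new HashMap<>();
      props.put("backupRoot", backupRootDir); // same key as the source
      try (Admin admin = conn.getAdmin()) {
        admin.execProcedure(ROLLLOG_PROCEDURE_SIGNATURE,
            ROLLLOG_PROCEDURE_NAME, props);
      }
    }
  }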

[12/51] [partial] hbase-site git commit: Published site at .

2018-01-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/96e5e102/testdevapidocs/org/apache/hadoop/hbase/rest/TestScannersWithLabels.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/rest/TestScannersWithLabels.html 
b/testdevapidocs/org/apache/hadoop/hbase/rest/TestScannersWithLabels.html
index 26337ab..47c5c80 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/rest/TestScannersWithLabels.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/rest/TestScannersWithLabels.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestScannersWithLabels
+public class TestScannersWithLabels
 extends Object
 
 
@@ -295,7 +295,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 TABLE
-private static finalorg.apache.hadoop.hbase.TableName TABLE
+private static finalorg.apache.hadoop.hbase.TableName TABLE
 
 
 
@@ -304,7 +304,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 CFA
-private static final String CFA
+private static final String CFA
 
 See Also:
 Constant
 Field Values
@@ -317,7 +317,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 CFB
-private static final String CFB
+private static final String CFB
 
 See Also:
 Constant
 Field Values
@@ -330,7 +330,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 COLUMN_1
-private static final String COLUMN_1
+private static final String COLUMN_1
 
 See Also:
 Constant
 Field Values
@@ -343,7 +343,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 COLUMN_2
-private static final String COLUMN_2
+private static final String COLUMN_2
 
 See Also:
 Constant
 Field Values
@@ -356,7 +356,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 TOPSECRET
-private static final String TOPSECRET
+private static final String TOPSECRET
 
 See Also:
 Constant
 Field Values
@@ -369,7 +369,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 PUBLIC
-private static final String PUBLIC
+private static final String PUBLIC
 
 See Also:
 Constant
 Field Values
@@ -382,7 +382,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 PRIVATE
-private static final String PRIVATE
+private static final String PRIVATE
 
 See Also:
 Constant
 Field Values
@@ -395,7 +395,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 CONFIDENTIAL
-private static final String CONFIDENTIAL
+private static final String CONFIDENTIAL
 
 See Also:
 Constant
 Field Values
@@ -408,7 +408,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 SECRET
-private static final String SECRET

[12/51] [partial] hbase-site git commit: Published site at .

2018-01-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8118541f/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
index a36c52a..196d515 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class HMobStore
+public class HMobStore
 extends HStore
 The store implementation to save MOBs (medium objects); it extends HStore.
  When a descriptor of a column family has the value "IS_MOB", it means this column family
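For context, a family becomes a MOB family through its column descriptor. A minimal sketch with the HBase 2.x client API (the family name "f" and the 100 KB threshold are illustrative values, not taken from this commit):

    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.util.Bytes;

    // Sketch: mark a column family as MOB-enabled so its large cells are
    // written to MOB files managed by HMobStore.
    ColumnFamilyDescriptor cf = ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes("f"))
        .setMobEnabled(true)           // sets the IS_MOB attribute
        .setMobThreshold(100 * 1024L)  // cells above ~100 KB are stored as MOBs
        .build();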
@@ -154,19 +154,19 @@ extends Field and Description
 
 
-private long
+private AtomicLong
 cellsCountCompactedFromMob
 
 
-private long
+private AtomicLong
 cellsCountCompactedToMob
 
 
-private long
+private AtomicLong
 cellsSizeCompactedFromMob
 
 
-private long
+private AtomicLong
 cellsSizeCompactedToMob
 
 
@@ -198,23 +198,23 @@ extends mobFamilyPath
 
 
-private long
+private AtomicLong
 mobFlushCount
 
 
-private long
+private AtomicLong
 mobFlushedCellsCount
 
 
-private long
+private AtomicLong
 mobFlushedCellsSize
 
 
-private long
+private AtomicLong
 mobScanCellsCount
 
 
-private long
+private AtomicLong
 mobScanCellsSize
 
 
@@ -501,7 +501,7 @@ extends 
 
 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
 
 
 
@@ -510,7 +510,7 @@ extends 
 
 mobCacheConfig
-private MobCacheConfig mobCacheConfig
+private MobCacheConfig mobCacheConfig
 
 
 
@@ -519,7 +519,7 @@ extends 
 
 homePath
-private org.apache.hadoop.fs.Path homePath
+private org.apache.hadoop.fs.Path homePath
 
 
 
@@ -528,7 +528,7 @@ extends 
 
 mobFamilyPath
-private org.apache.hadoop.fs.Path mobFamilyPath
+private org.apache.hadoop.fs.Path mobFamilyPath
 
 
 
@@ -537,7 +537,7 @@ extends 
 
 cellsCountCompactedToMob
-private volatile long cellsCountCompactedToMob
+private AtomicLong cellsCountCompactedToMob
 
 
 
@@ -546,7 +546,7 @@ extends 
 
 cellsCountCompactedFromMob
-private volatile long cellsCountCompactedFromMob
+private AtomicLong cellsCountCompactedFromMob
 
 
 
@@ -555,7 +555,7 @@ extends 
 
 cellsSizeCompactedToMob
-private volatile long cellsSizeCompactedToMob
+private AtomicLong cellsSizeCompactedToMob
 
 
 
@@ -564,7 +564,7 @@ extends 
 
 cellsSizeCompactedFromMob
-private volatile long cellsSizeCompactedFromMob
+private AtomicLong cellsSizeCompactedFromMob
 
 
 
@@ -573,7 +573,7 @@ extends 
 
 mobFlushCount
-private volatile long mobFlushCount
+private AtomicLong mobFlushCount

[12/51] [partial] hbase-site git commit: Published site at .

2018-01-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/14db89d7/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.FaultyRsExecutor.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.FaultyRsExecutor.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.FaultyRsExecutor.html
index f1db5ca..d8515d7 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.FaultyRsExecutor.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.FaultyRsExecutor.html
@@ -32,813 +32,820 @@
 024import static org.junit.Assert.fail;
 025
 026import java.io.IOException;
-027import java.net.SocketTimeoutException;
-028import java.util.NavigableMap;
-029import java.util.Random;
-030import java.util.Set;
-031import java.util.SortedSet;
-032import 
java.util.concurrent.ConcurrentSkipListMap;
-033import 
java.util.concurrent.ConcurrentSkipListSet;
-034import 
java.util.concurrent.ExecutionException;
-035import java.util.concurrent.Executors;
-036import java.util.concurrent.Future;
-037import 
java.util.concurrent.ScheduledExecutorService;
-038import java.util.concurrent.TimeUnit;
-039
-040import 
org.apache.hadoop.conf.Configuration;
-041import 
org.apache.hadoop.hbase.CategoryBasedTimeout;
-042import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-043import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-044import 
org.apache.hadoop.hbase.NotServingRegionException;
-045import 
org.apache.hadoop.hbase.ServerName;
-046import 
org.apache.hadoop.hbase.TableName;
-047import 
org.apache.hadoop.hbase.client.RegionInfo;
-048import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-049import 
org.apache.hadoop.hbase.client.RetriesExhaustedException;
-050import 
org.apache.hadoop.hbase.exceptions.UnexpectedStateException;
-051import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-052import 
org.apache.hadoop.hbase.master.MasterServices;
-053import 
org.apache.hadoop.hbase.master.RegionState.State;
-054import 
org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;
-055import 
org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait;
-056import 
org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher;
-057import 
org.apache.hadoop.hbase.procedure2.Procedure;
-058import 
org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
-059import 
org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
-060import 
org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
-061import 
org.apache.hadoop.hbase.procedure2.util.StringUtils;
-062import 
org.apache.hadoop.hbase.regionserver.RegionServerAbortedException;
-063import 
org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
-064import 
org.apache.hadoop.hbase.testclassification.MasterTests;
-065import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-066import 
org.apache.hadoop.hbase.util.Bytes;
-067import 
org.apache.hadoop.hbase.util.FSUtils;
-068import 
org.apache.hadoop.ipc.RemoteException;
-069import org.junit.After;
-070import org.junit.Before;
-071import org.junit.Ignore;
-072import org.junit.Rule;
-073import org.junit.Test;
-074import 
org.junit.experimental.categories.Category;
-075import 
org.junit.rules.ExpectedException;
-076import org.junit.rules.TestName;
-077import org.junit.rules.TestRule;
-078import org.slf4j.Logger;
-079import org.slf4j.LoggerFactory;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-082import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-083import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresRequest;
-084import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
-085import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest;
-086import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo;
-087import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse;
-088import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState;
-089import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition;
-090import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
-092
-093@Category({MasterTests.class, 
MediumTests.class})
-094public class TestAssignmentManager {
-095  private static final Logger LOG = 

[12/51] [partial] hbase-site git commit: Published site at .

2018-01-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0b638133/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html 
b/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html
index e2ff53a..e89ffa5 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/package-use.html
@@ -110,104 +110,119 @@
 AbstractTestAsyncTableScan
 
 
-AbstractTestResultScannerCursor
+AbstractTestCIOperationTimeout
+Base class for testing operation timeout logic for 
ConnectionImplementation.
+
 
 
-AbstractTestScanCursor
+AbstractTestCIRpcTimeout
+Base class for testing RPC timeout logic for 
ConnectionImplementation.
+
 
 
-AbstractTestShell
+AbstractTestCITimeout
+Base class for testing timeout logic for 
ConnectionImplementation.
+
 
 
-ColumnCountOnRowFilter
+AbstractTestResultScannerCursor
 
 
+AbstractTestScanCursor
+
+
+AbstractTestShell
+
+
+ColumnCountOnRowFilter
+
+
 DoNothingAsyncRegistry
 Registry that does nothing.
 
 
-
+
 TestAsyncAdminBase
 Class to test AsyncAdmin.
 
 
-
+
 TestAsyncProcess.MyAsyncProcess
 
-
+
 TestAsyncProcess.MyAsyncProcessWithReplicas
 
-
+
 TestAsyncProcess.MyConnectionImpl
 Returns our async process.
 
 
-
+
 TestAsyncProcess.ResponseGenerator
 
-
+
 TestAsyncProcess.RR
 After reading TheDailyWtf, I always wanted to create a 
MyBoolean enum like this!
 
 
-
+
 TestAsyncTableGetMultiThreaded
 Will split the table, and move region randomly when 
testing.
 
 
-
+
 TestAsyncTableScanMetrics.ScanWithMetrics
 
-
+
 TestBlockEvictionFromClient.CustomInnerRegionObserver
 
-
+
 TestBlockEvictionFromClient.GetThread
 
-
+
 TestBlockEvictionFromClient.MultiGetThread
 
-
+
 TestBlockEvictionFromClient.ScanThread
 
-
+
 TestCloneSnapshotFromClient
 Test clone snapshots from the client
 
 
-
+
 TestFromClientSide
 Run tests that use the HBase clients; 
Table.
 
 
-
+
 TestFromClientSideScanExcpetion
 
-
+
 TestHBaseAdminNoCluster.MethodCaller
 
-
+
 TestIncrementsFromClientSide
 Run Increment tests that use the HBase clients; 
HTable.
 
 
-
+
 TestMetaCache.ExceptionInjector
 
-
+
 TestMetaCache.FakeRSRpcServices
 
-
+
 TestRestoreSnapshotFromClient
 Test restore snapshots from the client
 
 
-
+
 TestSnapshotCloneIndependence
 Test to verify that the cloned table is independent of the 
table from which it was cloned
 
 
-
+
 TestSnapshotFromClient
 Test create/using/deleting snapshots from the client
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0b638133/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.html
 
b/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.html
index 23b0ee4..5022712 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":10,"i2":9,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10};
+var methods = 
{"i0":9,"i1":10,"i2":9,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestReplicationAdmin
+public class TestReplicationAdmin
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Unit testing of ReplicationAdmin
 
@@ -253,32 +253,40 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 void
+testPeerClusterKey()
+
+
+void
 testPeerConfig()
 Tests that the peer configuration used by ReplicationAdmin 
contains all
  the peer's properties.
 
 
-
+
 void
 testPeerConfigConflict()
 
-
+
 void
 testPeerExcludeNamespaces()
 
-
+
 void
 testPeerExcludeTableCFs()
 
-
+
+void
+testPeerReplicationEndpointImpl()
+
+
 void
 testRemovePeerTableCFs()
 
-
+
 void
 testSetPeerNamespaces()
 
-
+
 void
 testSetReplicateAllUserTables()
 
@@ -310,7 +318,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
 
 
 
@@ -319,7 +327,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 TEST_UTIL
-private static final HBaseTestingUtility 

[12/51] [partial] hbase-site git commit: Published site at .

2018-01-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f183e80f/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHMobStore.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHMobStore.html 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHMobStore.html
index a4307f9..9a73216 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHMobStore.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHMobStore.html
@@ -748,7 +748,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 init
-private void init(org.apache.hadoop.conf.Configuration conf,
+private void init(org.apache.hadoop.conf.Configuration conf,
                   org.apache.hadoop.hbase.HColumnDescriptor hcd)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
@@ -763,7 +763,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testGetFromMemStore
-public void testGetFromMemStore()
+public void testGetFromMemStore()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Getting data from memstore
 
@@ -778,7 +778,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testGetFromFiles
-public void testGetFromFiles()
+public void testGetFromFiles()
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Getting MOB data from files
 
@@ -793,7 +793,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testGetReferencesFromFiles
-public void testGetReferencesFromFiles()
+public void testGetReferencesFromFiles()
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Getting the reference data from files
 
@@ -808,7 +808,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testGetFromMemStoreAndFiles
-public void testGetFromMemStoreAndFiles()
+public void testGetFromMemStoreAndFiles()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Getting data from memstore and files
 
@@ -823,7 +823,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testMobCellSizeThreshold
-public void testMobCellSizeThreshold()
+public void testMobCellSizeThreshold()
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Getting data from memstore and files
 
@@ -838,7 +838,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testCommitFile
-public void testCommitFile()
+public void testCommitFile()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -852,7 +852,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testResolve
-public void testResolve()
+public void testResolve()
  throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -866,7 +866,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 flush
-private void flush(int storeFilesSize)
+private void flush(int storeFilesSize)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Flush the memstore
 
@@ -883,7 +883,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 flushStore
-private static void flushStore(org.apache.hadoop.hbase.regionserver.HMobStore store,
+private static void flushStore(org.apache.hadoop.hbase.regionserver.HMobStore store,
                                long id)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Flush the memstore
@@ -902,7 +902,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 testMOBStoreEncryption
-public void testMOBStoreEncryption()
+public void testMOBStoreEncryption()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -916,7 +916,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 

[12/51] [partial] hbase-site git commit: Published site at .

2018-01-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/49431b18/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html 
b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
index ce948d2..66944b6 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class MasterRpcServices
+public class MasterRpcServices
 extends RSRpcServices
 implements 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService.BlockingInterface,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService.BlockingInterface
 Implements the master RPC services.
@@ -635,111 +635,116 @@ implements 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
  
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequestrequest)
 
 
+org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportProcedureDoneResponse
+reportProcedureDone(org.apache.hbase.thirdparty.com.google.protobuf.RpcControllercontroller,
+   
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportProcedureDoneRequestrequest)
+
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportResponse
 reportRegionSpaceUse(org.apache.hbase.thirdparty.com.google.protobuf.RpcControllercontroller,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequestrequest)
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse
 reportRegionStateTransition(org.apache.hbase.thirdparty.com.google.protobuf.RpcControllerc,

org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequestreq)
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse
 reportRSFatalError(org.apache.hbase.thirdparty.com.google.protobuf.RpcControllercontroller,
   
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequestrequest)
 
-
+
 org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse
 requestLock(org.apache.hbase.thirdparty.com.google.protobuf.RpcControllercontroller,

org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequestrequest)
 
-
+
 

[12/51] [partial] hbase-site git commit: Published site at .

2018-01-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction.html
index 763eec0..c4f0db1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction
+static class StochasticLoadBalancer.PrimaryRegionCountSkewCostFunction
 extends StochasticLoadBalancer.CostFunction
 Compute the cost of a potential cluster state from skew in 
number of
  primary regions on a cluster.
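As a rough illustration of a skew-based cost (a sketch of the idea only, not the balancer's exact formula), one can score per-server primary counts by their total deviation from the mean, normalized to [0, 1]:

    // Sketch: 0 when primaries are spread evenly, approaching 1 when
    // all primaries sit on a single server.
    static double primarySkewCost(int[] primariesPerServer) {
      int total = 0;
      for (int c : primariesPerServer) {
        total += c;
      }
      double mean = (double) total / primariesPerServer.length;
      double deviation = 0;
      for (int c : primariesPerServer) {
        deviation += Math.abs(c - mean);
      }
      // Worst case: every primary on one server.
      double worst = 2.0 * total * (1.0 - 1.0 / primariesPerServer.length);
      return worst == 0 ? 0 : deviation / worst;
    }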
@@ -230,7 +230,7 @@ extends 
 
 PRIMARY_REGION_COUNT_SKEW_COST_KEY
-private static final String PRIMARY_REGION_COUNT_SKEW_COST_KEY
+private static final String PRIMARY_REGION_COUNT_SKEW_COST_KEY
 
 See Also:
 Constant
 Field Values
@@ -243,7 +243,7 @@ extends 
 
 DEFAULT_PRIMARY_REGION_COUNT_SKEW_COST
-private static final float DEFAULT_PRIMARY_REGION_COUNT_SKEW_COST
+private static final float DEFAULT_PRIMARY_REGION_COUNT_SKEW_COST
 
 See Also:
 Constant
 Field Values
@@ -256,7 +256,7 @@ extends 
 
 stats
-private double[] stats
+private double[] stats
 
 
 
@@ -273,7 +273,7 @@ extends 
 
 PrimaryRegionCountSkewCostFunction
-PrimaryRegionCountSkewCostFunction(org.apache.hadoop.conf.Configuration conf)
+PrimaryRegionCountSkewCostFunction(org.apache.hadoop.conf.Configuration conf)
 
 
 
@@ -290,7 +290,7 @@ extends 
 
 cost
-double cost()
+double cost()
 
 Specified by:
 cost in class StochasticLoadBalancer.CostFunction

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RackLocalityCostFunction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RackLocalityCostFunction.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RackLocalityCostFunction.html
index 268e2f3..5550840 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RackLocalityCostFunction.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RackLocalityCostFunction.html
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class StochasticLoadBalancer.RackLocalityCostFunction
+static class StochasticLoadBalancer.RackLocalityCostFunction
 extends StochasticLoadBalancer.LocalityBasedCostFunction
 
 
@@ -239,7 +239,7 @@ extends 
 
 RACK_LOCALITY_COST_KEY
-private static final String RACK_LOCALITY_COST_KEY
+private static final String RACK_LOCALITY_COST_KEY
 
 See Also:
 Constant
 Field Values
@@ -252,7 +252,7 @@ extends 
 
 DEFAULT_RACK_LOCALITY_COST
-private static final float DEFAULT_RACK_LOCALITY_COST
+private static final float DEFAULT_RACK_LOCALITY_COST
 
 See Also:
 Constant
 Field Values
@@ -273,7 +273,7 @@ extends 
 
 RackLocalityCostFunction
-public RackLocalityCostFunction(org.apache.hadoop.conf.Configuration conf,
+public RackLocalityCostFunction(org.apache.hadoop.conf.Configuration conf,
                                 MasterServices services)
 
 
@@ -291,7 +291,7 @@ extends 
 
 regionIndexToEntityIndex
-int regionIndexToEntityIndex(int region)
+int regionIndexToEntityIndex(int region)
 Description copied from class: StochasticLoadBalancer.LocalityBasedCostFunction
 Maps region to the current entity (server or rack) on which 
it is stored
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c7c40c62/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RandomCandidateGenerator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RandomCandidateGenerator.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.RandomCandidateGenerator.html
index 233412a..992fe88 100644
--- 

[12/51] [partial] hbase-site git commit: Published site at .

2018-01-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/bb398572/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
index b8e321a..439a50d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.ObserverGetter.html
@@ -468,274 +468,216 @@
 460  }
 461
 462  /**
-463   * Used to gracefully handle fallback 
to deprecated methods when we
-464   * evolve coprocessor APIs.
-465   *
-466   * When a particular Coprocessor API is 
updated to change methods, hosts can support fallback
-467   * to the deprecated API by using this 
method to determine if an instance implements the new API.
-468   * In the event that said support is 
partial, then in the face of a runtime issue that prevents
-469   * proper operation {@link 
#legacyWarning(Class, String)} should be used to let operators know.
-470   *
-471   * For examples of this in action, see 
the implementation of
-472   * <ul>
-473   *   <li>{@link org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost}
-474   *   <li>{@link org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost}
-475   * </ul>
-476   *
-477   * @param clazz Coprocessor you wish to 
evaluate
-478   * @param methodName the name of the 
non-deprecated method version
-479   * @param parameterTypes the Class of 
the non-deprecated method's arguments in the order they are
-480   * declared.
-481   */
-482  @InterfaceAudience.Private
-483  protected static boolean useLegacyMethod(final Class<? extends Coprocessor> clazz,
-484      final String methodName, final Class<?>... parameterTypes) {
-485boolean useLegacy;
-486// Use reflection to see if they 
implement the non-deprecated version
-487try {
-488  clazz.getDeclaredMethod(methodName, 
parameterTypes);
-489  LOG.debug("Found an implementation 
of '" + methodName + "' that uses updated method " +
-490  "signature. Skipping legacy 
support for invocations in '" + clazz +"'.");
-491  useLegacy = false;
-492} catch (NoSuchMethodException 
exception) {
-493  useLegacy = true;
-494} catch (SecurityException exception) 
{
-495  LOG.warn("The Security Manager 
denied our attempt to detect if the coprocessor '" + clazz +
-496  "' requires legacy support; 
assuming it does. If you get later errors about legacy " +
-497  "coprocessor use, consider 
updating your security policy to allow access to the package" +
-498  " and declared members of your 
implementation.");
-499  LOG.debug("Details of Security 
Manager rejection.", exception);
-500  useLegacy = true;
+463   * Used to limit legacy handling to 
once per Coprocessor class per classloader.
+464   */
+465  private static final Set<Class<? extends Coprocessor>> legacyWarning =
+466      new ConcurrentSkipListSet<>(
+467      new Comparator<Class<? extends Coprocessor>>() {
+468@Override
+469    public int compare(Class<? extends Coprocessor> c1, Class<? extends Coprocessor> c2) {
+470  if (c1.equals(c2)) {
+471return 0;
+472  }
+473  return 
c1.getName().compareTo(c2.getName());
+474}
+475  });
+476
+477  /**
+478   * Implementation-defined function to get an observer of type {@code O} from a coprocessor
+479   * of type {@code C}. Concrete implementations of CoprocessorHost define one getter for each
+480   * observer they can handle. For example, RegionCoprocessorHost uses three getters, one for
+481   * each of RegionObserver, EndpointObserver and BulkLoadObserver.
+482   * These getters are used by {@code ObserverOperation} to get the appropriate observer from
+483   * the coprocessor.
+484   */
+485  @FunctionalInterface
+486  public interface ObserverGetter<C, O> extends Function<C, Optional<O>> {}
+487
+488  private abstract class ObserverOperation<O> extends ObserverContextImpl<E> {
+489    ObserverGetter<C, O> observerGetter;
+490
+491    ObserverOperation(ObserverGetter<C, O> observerGetter) {
+492      this(observerGetter, null);
+493    }
+494
+495    ObserverOperation(ObserverGetter<C, O> observerGetter, User user) {
+496      this(observerGetter, user, false);
+497    }
+498
+499    ObserverOperation(ObserverGetter<C, O> observerGetter, boolean bypassable) {
+500      this(observerGetter, null, bypassable);
 501}
-502return useLegacy;
-503  }
-504
-505  /**
-506   * Used to limit legacy handling to 
once per Coprocessor class per classloader.
-507   */
-508  private static final Set<Class<? extends Coprocessor>> legacyWarning =
-509      new ConcurrentSkipListSet<>(
-510      new
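The ObserverGetter added above is just a Function<C, Optional<O>>; each concrete host supplies one getter per observer type it can dispatch to. A stand-alone sketch of the pattern (the types here are illustrative stand-ins, not HBase's real interfaces):

    import java.util.Optional;
    import java.util.function.Function;

    interface RegionObserver { void preFlush(); }
    interface Coproc { Optional<RegionObserver> regionObserver(); }

    class HostSketch {
      // One getter per observer type the host knows how to call.
      static final Function<Coproc, Optional<RegionObserver>> REGION_GETTER =
          Coproc::regionObserver;

      void notifyPreFlush(Coproc c) {
        // The host stays generic; the getter extracts the right observer, if present.
        REGION_GETTER.apply(c).ifPresent(RegionObserver::preFlush);
      }
    }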

[12/51] [partial] hbase-site git commit: Published site at .

2018-01-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
index 724353c..c5a4a7f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
@@ -274,6 +274,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/IllegalState
 
 
 
-Copyright © 2007–2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
index 75e7b05..06b0d14 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
@@ -1896,6 +1896,6 @@ public
-Copyright © 2007–2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html
index 12eac35..e740733 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html
@@ -567,6 +567,6 @@ extends
-Copyright © 2007–2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileUtil.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileUtil.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileUtil.html
index 71186cf..26c8b69 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileUtil.html
@@ -285,6 +285,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-Copyright © 2007–2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
index 330e08a..bbef49a 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
@@ -1488,6 +1488,6 @@ implements
-Copyright © 2007–2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/InclusiveCombinedBlockCache.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/InclusiveCombinedBlockCache.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/InclusiveCombinedBlockCache.html
index 644d487..5bd8d1c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/InclusiveCombinedBlockCache.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/InclusiveCombinedBlockCache.html
@@ -406,6 +406,6 @@ extends
-Copyright © 2007–2017 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
+Copyright © 2007–2018 The Apache Software Foundation (https://www.apache.org/). All rights reserved.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/69506d41/devapidocs/org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.html
--
diff --git 

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/83bf6175/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.AsyncProcessWithFailure.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.AsyncProcessWithFailure.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.AsyncProcessWithFailure.html
index bbd91b8..4f76302 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.AsyncProcessWithFailure.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcess.AsyncProcessWithFailure.html
@@ -56,1641 +56,1753 @@
 048import 
java.util.concurrent.atomic.AtomicBoolean;
 049import 
java.util.concurrent.atomic.AtomicInteger;
 050import 
java.util.concurrent.atomic.AtomicLong;
-051
-052import 
org.apache.hadoop.conf.Configuration;
-053import 
org.apache.hadoop.hbase.CallQueueTooBigException;
-054import 
org.apache.hadoop.hbase.CategoryBasedTimeout;
-055import org.apache.hadoop.hbase.Cell;
-056import 
org.apache.hadoop.hbase.HConstants;
-057import 
org.apache.hadoop.hbase.HRegionInfo;
-058import 
org.apache.hadoop.hbase.HRegionLocation;
-059import 
org.apache.hadoop.hbase.RegionLocations;
-060import 
org.apache.hadoop.hbase.ServerName;
-061import 
org.apache.hadoop.hbase.TableName;
-062import 
org.apache.hadoop.hbase.client.AsyncProcessTask.ListRowAccess;
-063import 
org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows;
-064import 
org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
-065import 
org.apache.hadoop.hbase.client.backoff.ServerStatistics;
-066import 
org.apache.hadoop.hbase.client.coprocessor.Batch;
-067import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-068import 
org.apache.hadoop.hbase.testclassification.ClientTests;
-069import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-070import 
org.apache.hadoop.hbase.util.Bytes;
-071import 
org.apache.hadoop.hbase.util.Threads;
-072import org.junit.Assert;
-073import org.junit.BeforeClass;
-074import org.junit.Ignore;
-075import org.junit.Rule;
-076import org.junit.Test;
-077import 
org.junit.experimental.categories.Category;
-078import org.junit.rules.TestRule;
-079import org.mockito.Mockito;
-080import org.slf4j.Logger;
-081import org.slf4j.LoggerFactory;
-082
-083@Category({ClientTests.class, 
MediumTests.class})
-084public class TestAsyncProcess {
-085  @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-086  
withLookingForStuckThread(true).build();
-087  private static final Logger LOG = 
LoggerFactory.getLogger(TestAsyncProcess.class);
-088  private static final TableName 
DUMMY_TABLE =
-089  TableName.valueOf("DUMMY_TABLE");
-090  private static final byte[] 
DUMMY_BYTES_1 = Bytes.toBytes("DUMMY_BYTES_1");
-091  private static final byte[] 
DUMMY_BYTES_2 = Bytes.toBytes("DUMMY_BYTES_2");
-092  private static final byte[] 
DUMMY_BYTES_3 = Bytes.toBytes("DUMMY_BYTES_3");
-093  private static final byte[] FAILS = 
Bytes.toBytes("FAILS");
-094  private static final Configuration CONF 
= new Configuration();
-095  private static final 
ConnectionConfiguration CONNECTION_CONFIG =
-096  new 
ConnectionConfiguration(CONF);
-097  private static final ServerName sn = 
ServerName.valueOf("s1,1,1");
-098  private static final ServerName sn2 = 
ServerName.valueOf("s2,2,2");
-099  private static final ServerName sn3 = 
ServerName.valueOf("s3,3,3");
-100  private static final HRegionInfo hri1 
=
-101  new HRegionInfo(DUMMY_TABLE, 
DUMMY_BYTES_1, DUMMY_BYTES_2, false, 1);
-102  private static final HRegionInfo hri2 
=
-103  new HRegionInfo(DUMMY_TABLE, 
DUMMY_BYTES_2, HConstants.EMPTY_END_ROW, false, 2);
-104  private static final HRegionInfo hri3 
=
-105  new HRegionInfo(DUMMY_TABLE, 
DUMMY_BYTES_3, HConstants.EMPTY_END_ROW, false, 3);
-106  private static final HRegionLocation 
loc1 = new HRegionLocation(hri1, sn);
-107  private static final HRegionLocation 
loc2 = new HRegionLocation(hri2, sn);
-108  private static final HRegionLocation 
loc3 = new HRegionLocation(hri3, sn2);
-109
-110  // Replica stuff
-111  private static final RegionInfo hri1r1 
= RegionReplicaUtil.getRegionInfoForReplica(hri1, 1);
-112  private static final RegionInfo hri1r2 
= RegionReplicaUtil.getRegionInfoForReplica(hri1, 2);
-113  private static final RegionInfo hri2r1 
= RegionReplicaUtil.getRegionInfoForReplica(hri2, 1);
-114  private static final RegionLocations 
hrls1 = new RegionLocations(new HRegionLocation(hri1, sn),
-115  new HRegionLocation(hri1r1, sn2), 
new HRegionLocation(hri1r2, sn3));
-116  private static final RegionLocations 
hrls2 = new RegionLocations(new HRegionLocation(hri2, sn2),
-117  new HRegionLocation(hri2r1, 
sn3));
-118  private static final RegionLocations 
hrls3 =
-119  new RegionLocations(new 
HRegionLocation(hri3, sn3), null);
-120

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/63d6f712/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
index 010a1d0..e90dedc 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
@@ -254,7 +254,7 @@ extends 
 
 Methods inherited from classorg.apache.hadoop.hbase.regionserver.HRegionServer
-abort,
 addRegion,
 addToMovedRegions,
 checkFileSystem,
 cleanMovedRegions,
 clearRegionBlockCache,
 closeAllRegions, href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeAndOfflineRegionForSplitOrMerge-java.util.List-">closeAndOfflineRegionForSplitOrMerge,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeRegion-java.lang.String-boolean-org.apache.hadoop.hbase.ServerName-">closeRegion,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#constructRegionServer-java.lang.Class-org.apache.hadoop.conf.Configuration-">constructRegionServer,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#convertThrowableToIOE-java.lang.Throwable-java.lang.String-">convertThrowableToIOE,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createClusterConnection--">createClusterConnection,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection,
 > <
 a 
href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionLoad-java.lang.String-">createRegionLoad,
 createRegionServerStatusStub,
 createRegionServerStatusStub,
 execRegionServerService,
 getCacheConfig,
 getChoreService,
 getClusterConnection,
 getClusterId,
 getCompactionPressure,
 getCompactionRequestor,
 getCompactSplitThread,
 getConfiguration,
 getConfigurationManager,
 getConnection,
 getCoordinatedStateManager,
 getEventLoopGroupConfig,
 getExecutorService,
 getFavoredNodesForRegion,
 getFileSystem,
 getFlushPressure,
 getFlushRequester,
 getFlushThroughputController, getFsTableDescriptors,
 getHeapMemoryManager,
 getInfoServer,
 getLastSequenceId,
 getLeases,
 getMasterAddressTracker,
 getMetaTableLocator,
 getMetrics, getMostLoadedRegions,
 getNonceManager,
 getNumberOfOnlineRegions,
 getOnlineRegion,
 getOnlineRegionsLocalContext,
 getOnlineTables,
 getRegion,
 getRegion, getRegionBlockLocations,
 getRegionByEncodedName,
 getRegionByEncodedName,
 getRegions,
 getRegions,
 getRegionServerAccounting,
 getRegionServerCoprocessorHost, getRegionServerCoprocessors,
 getRegionServerMetrics,
 getRegionServerRpcQuotaManager,
 getRegionServerSpaceQuotaManager,
 getRegionsInTransitionInRS,
 getReplicationSourceService,
 getRootDir, getRpcServer,
 getRSRpcServices,
 getSecureBulkLoadManager,
 getStartcode,
 getThreadWakeFrequency,
 getWAL,
 getWALFileSystem,
 getWalRoller, getWALRootDir,
 getWALs,
 handleReportForDutyResponse,
 initializeMemStoreChunkCreator,
 isAborted,
 isOnline,
 isStopped,
 <
 a 
href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isStopping--">isStopping,
 kill,
 movedRegionCleanerPeriod,
 onConfigurationChange,
 postOpenDeployTasks,
 regionLock,
 removeRegion,
 reportRegionSizesForQuotas,
 reportRegionStateTransition,
 sendShutdownInterrupt,
 setInitLatch,
 setupClusterConnection,
 shouldUseThisHostnameInstead, stop,
 stop,
 toString,
 tryRegionServerReport,
 unassign,
 updateConfiguration,
 updateRegionFavoredNodesMapping,
 waitForServerOnline,
 walRollRequestFinished
+abort,
 addRegion,
 addToMovedRegions,
 checkFileSystem,
 cleanMovedRegions,
 clearRegionBlockCache,
 closeAllRegions, href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeAndOfflineRegionForSplitOrMerge-java.util.List-">closeAndOfflineRegionForSplitOrMerge,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeRegion-java.lang.String-boolean-org.apache.hadoop.hbase.ServerName-">closeRegion,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#constructRegionServer-java.lang.Class-org.apache.hadoop.conf.Configuration-">constructRegionServer,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#convertThrowableToIOE-java.lang.Throwable-java.lang.String-">convertThrowableToIOE,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createClusterConnection--">createClusterConnection,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection,
 > <
 a 

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d449e87f/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html 
b/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html
index 7d4d03e..47b04e4 100644
--- a/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html
@@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Private
  @InterfaceStability.Evolving
-public class HttpServer
+public class HttpServer
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements FilterContainer
 Create a Jetty embedded server to answer http requests. The 
primary goal
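For readers unfamiliar with the underlying machinery, the plainest form of an embedded Jetty server looks like this (raw Jetty 9 API, not HBase's HttpServer wrapper; the port and response body are illustrative):

    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import org.eclipse.jetty.server.Request;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.handler.AbstractHandler;

    public class MiniJetty {
      public static void main(String[] args) throws Exception {
        Server server = new Server(8080);            // listen port (illustrative)
        server.setHandler(new AbstractHandler() {
          @Override
          public void handle(String target, Request baseRequest,
              HttpServletRequest request, HttpServletResponse response)
              throws java.io.IOException {
            response.setContentType("text/plain");
            response.getWriter().println("ok");      // answer every request with "ok"
            baseRequest.setHandled(true);
          }
        });
        server.start();
        server.join();                               // block until the server stops
      }
    }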
@@ -684,7 +684,7 @@ implements 
 
 LOG
-private static finalorg.slf4j.Logger LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
 
 
@@ -693,7 +693,7 @@ implements 
 
 EMPTY_STRING
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String EMPTY_STRING
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String EMPTY_STRING
 
 See Also:
 Constant
 Field Values
@@ -706,7 +706,7 @@ implements 
 
 DEFAULT_MAX_HEADER_SIZE
-private static finalint DEFAULT_MAX_HEADER_SIZE
-private static final int DEFAULT_MAX_HEADER_SIZE
+private static final int DEFAULT_MAX_HEADER_SIZE
 See Also:
 Constant
 Field Values
@@ -719,7 +719,7 @@ implements 
 
 FILTER_INITIALIZERS_PROPERTY
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String FILTER_INITIALIZERS_PROPERTY
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String FILTER_INITIALIZERS_PROPERTY
 
 See Also:
 Constant
 Field Values
@@ -732,7 +732,7 @@ implements 
 
 HTTP_MAX_THREADS
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HTTP_MAX_THREADS
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HTTP_MAX_THREADS
 
 See Also:
 Constant
 Field Values
@@ -745,7 +745,7 @@ implements 
 
 HTTP_UI_AUTHENTICATION
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HTTP_UI_AUTHENTICATION
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HTTP_UI_AUTHENTICATION
 
 See Also:
 Constant
 Field Values
@@ -758,7 +758,7 @@ implements 
 
 HTTP_AUTHENTICATION_PREFIX
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HTTP_AUTHENTICATION_PREFIX
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HTTP_AUTHENTICATION_PREFIX
 
 See Also:
 Constant
 Field Values
@@ -771,7 +771,7 @@ implements 
 
 HTTP_SPNEGO_AUTHENTICATION_PREFIX
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HTTP_SPNEGO_AUTHENTICATION_PREFIX
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HTTP_SPNEGO_AUTHENTICATION_PREFIX
 
 See Also:
 Constant
 Field Values
@@ -784,7 +784,7 @@ implements 
 
 HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX
 
 See Also:
 Constant
 Field Values
@@ -797,7 +797,7 @@ implements 
 
 HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY
 
 See Also:
 Constant
 Field Values
@@ -810,7 +810,7 @@ implements 
 
 HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
index 3400507..2baa140 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful for slinging {@link Cell} instances. It has a more powerful
-051 * and richer set of APIs than those in {@link CellUtil}, for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful for slinging {@link Cell} instances. It has a more powerful
+049 * and richer set of APIs than those in {@link CellUtil}, for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
+067return 
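
The fill*Range helpers in the hunk above re-point a caller-supplied ByteRange at one
component of an array-backed Cell without copying bytes. A minimal usage sketch,
assuming an array-backed KeyValue and the SimpleMutableByteRange implementation
(PrivateCellUtil is IA.Private, so this is illustrative rather than public API):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.util.ByteRange;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.SimpleMutableByteRange;

public class FillRangeSketch {
  public static void main(String[] args) {
    // KeyValue is array-backed: row/family/qualifier/value live in one byte[].
    Cell cell = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("d"),
        Bytes.toBytes("q"), Bytes.toBytes("v"));
    // One reusable range; fillRowRange() repositions it over the cell's row
    // bytes instead of allocating a copy.
    ByteRange range = new SimpleMutableByteRange();
    PrivateCellUtil.fillRowRange(cell, range);
    System.out.println(Bytes.toString(range.deepCopyToNewArray())); // row1
  }
}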

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b618ac40/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
index 38865a3..8b6f080 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
@@ -26,746 +26,954 @@
 018
 019package org.apache.hadoop.hbase.client;
 020
-021import java.io.IOException;
-022import java.nio.ByteBuffer;
-023import java.util.ArrayList;
-024import java.util.Arrays;
-025import java.util.HashMap;
-026import java.util.List;
-027import java.util.Map;
-028import java.util.NavigableMap;
-029import java.util.TreeMap;
-030import java.util.UUID;
-031import java.util.stream.Collectors;
-032import org.apache.hadoop.hbase.Cell;
-033import 
org.apache.hadoop.hbase.CellScannable;
-034import 
org.apache.hadoop.hbase.CellScanner;
-035import 
org.apache.hadoop.hbase.CellUtil;
-036import 
org.apache.hadoop.hbase.HConstants;
-037import 
org.apache.hadoop.hbase.KeyValue;
-038import 
org.apache.hadoop.hbase.PrivateCellUtil;
-039import org.apache.hadoop.hbase.Tag;
-040import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-041import 
org.apache.hadoop.hbase.io.HeapSize;
-042import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-043import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-044import 
org.apache.hadoop.hbase.security.access.AccessControlConstants;
-045import 
org.apache.hadoop.hbase.security.access.AccessControlUtil;
-046import 
org.apache.hadoop.hbase.security.access.Permission;
-047import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
-048import 
org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
-049import 
org.apache.hadoop.hbase.util.Bytes;
-050import 
org.apache.hadoop.hbase.util.ClassSize;
-051import 
org.apache.yetus.audience.InterfaceAudience;
-052
-053import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-054import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-055import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-056import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
-057import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
-058import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
+021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
+022
+023import java.io.IOException;
+024import java.nio.ByteBuffer;
+025import java.util.ArrayList;
+026import java.util.Arrays;
+027import java.util.HashMap;
+028import java.util.Iterator;
+029import java.util.List;
+030import java.util.Map;
+031import java.util.NavigableMap;
+032import java.util.Optional;
+033import java.util.TreeMap;
+034import java.util.UUID;
+035import java.util.stream.Collectors;
+036import 
org.apache.hadoop.hbase.ArrayBackedTag;
+037import org.apache.hadoop.hbase.Cell;
+038import 
org.apache.hadoop.hbase.CellScannable;
+039import 
org.apache.hadoop.hbase.CellScanner;
+040import 
org.apache.hadoop.hbase.CellUtil;
+041import 
org.apache.hadoop.hbase.ExtendedCell;
+042import 
org.apache.hadoop.hbase.HConstants;
+043import 
org.apache.hadoop.hbase.KeyValue;
+044import 
org.apache.hadoop.hbase.PrivateCellUtil;
+045import org.apache.hadoop.hbase.RawCell;
+046import org.apache.hadoop.hbase.Tag;
+047import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+048import 
org.apache.hadoop.hbase.io.HeapSize;
+049import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+050import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+051import 
org.apache.hadoop.hbase.security.access.AccessControlConstants;
+052import 
org.apache.hadoop.hbase.security.access.AccessControlUtil;
+053import 
org.apache.hadoop.hbase.security.access.Permission;
+054import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
+055import 
org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
+056import 
org.apache.hadoop.hbase.util.Bytes;
+057import 
org.apache.hadoop.hbase.util.ClassSize;
+058import 
org.apache.yetus.audience.InterfaceAudience;
 059
-060@InterfaceAudience.Public
-061public abstract class Mutation extends 
OperationWithAttributes implements Row, CellScannable,
-062HeapSize {
-063  public static final long 
MUTATION_OVERHEAD = ClassSize.align(
-064  // This
-065  ClassSize.OBJECT +
-066  // row + 
OperationWithAttributes.attributes
-067  2 * ClassSize.REFERENCE +
-068  // Timestamp
-069  1 * Bytes.SIZEOF_LONG +
-070  // durability
-071  ClassSize.REFERENCE +
-072  // familyMap
-073  ClassSize.REFERENCE +
-074  // familyMap (the TreeMap itself)
-075  ClassSize.TREEMAP +
-076  // priority
-077  ClassSize.INTEGER
-078  );
-079
-080  /**
-081   
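
The MUTATION_OVERHEAD constant above hand-sums the shallow footprint of a Mutation
from ClassSize primitives, and ClassSize.align() rounds the total up to the JVM's
8-byte object granularity. A sketch of the same accounting for a hypothetical class
(the field layout here is invented for illustration):

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;

public class FixedOverheadSketch {
  // Hypothetical layout: object header, two reference fields, one long, and a
  // TreeMap the class always allocates -- the same primitives MUTATION_OVERHEAD
  // is summed from above.
  public static final long FIXED_OVERHEAD = ClassSize.align(
      ClassSize.OBJECT
      + 2 * ClassSize.REFERENCE
      + Bytes.SIZEOF_LONG
      + ClassSize.TREEMAP);

  public static void main(String[] args) {
    System.out.println("estimated shallow size: " + FIXED_OVERHEAD);
  }
}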

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7c0589c0/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.RestoreSnapshotFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.RestoreSnapshotFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.RestoreSnapshotFuture.html
index 6fecbc9..2accda0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.RestoreSnapshotFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.RestoreSnapshotFuture.html
@@ -34,4140 +34,4141 @@
 026import 
java.nio.charset.StandardCharsets;
 027import java.util.ArrayList;
 028import java.util.Arrays;
-029import java.util.Collection;
-030import java.util.EnumSet;
-031import java.util.HashMap;
-032import java.util.Iterator;
-033import java.util.LinkedList;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Set;
-037import java.util.concurrent.Callable;
-038import 
java.util.concurrent.ExecutionException;
-039import java.util.concurrent.Future;
-040import java.util.concurrent.TimeUnit;
-041import 
java.util.concurrent.TimeoutException;
-042import 
java.util.concurrent.atomic.AtomicInteger;
-043import 
java.util.concurrent.atomic.AtomicReference;
-044import java.util.regex.Pattern;
-045import java.util.stream.Collectors;
-046import java.util.stream.Stream;
-047import 
org.apache.hadoop.conf.Configuration;
-048import 
org.apache.hadoop.hbase.Abortable;
-049import 
org.apache.hadoop.hbase.CacheEvictionStats;
-050import 
org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
-051import 
org.apache.hadoop.hbase.ClusterMetrics.Option;
-052import 
org.apache.hadoop.hbase.ClusterStatus;
-053import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-054import 
org.apache.hadoop.hbase.HBaseConfiguration;
-055import 
org.apache.hadoop.hbase.HConstants;
-056import 
org.apache.hadoop.hbase.HRegionInfo;
-057import 
org.apache.hadoop.hbase.HRegionLocation;
-058import 
org.apache.hadoop.hbase.HTableDescriptor;
-059import 
org.apache.hadoop.hbase.MasterNotRunningException;
-060import 
org.apache.hadoop.hbase.MetaTableAccessor;
-061import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-062import 
org.apache.hadoop.hbase.NamespaceNotFoundException;
-063import 
org.apache.hadoop.hbase.NotServingRegionException;
-064import 
org.apache.hadoop.hbase.RegionLoad;
-065import 
org.apache.hadoop.hbase.RegionLocations;
-066import 
org.apache.hadoop.hbase.ServerName;
-067import 
org.apache.hadoop.hbase.TableExistsException;
-068import 
org.apache.hadoop.hbase.TableName;
-069import 
org.apache.hadoop.hbase.TableNotDisabledException;
-070import 
org.apache.hadoop.hbase.TableNotFoundException;
-071import 
org.apache.hadoop.hbase.UnknownRegionException;
-072import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-073import 
org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
-074import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-075import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-076import 
org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-077import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-078import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-079import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-080import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-081import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-082import 
org.apache.hadoop.hbase.quotas.QuotaRetriever;
-083import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-084import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
-085import 
org.apache.hadoop.hbase.replication.ReplicationException;
-086import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-087import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-088import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-089import 
org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-090import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-091import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-092import 
org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
-093import 
org.apache.hadoop.hbase.util.Addressing;
-094import 
org.apache.hadoop.hbase.util.Bytes;
-095import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-096import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-097import 
org.apache.hadoop.hbase.util.Pair;
-098import 
org.apache.hadoop.ipc.RemoteException;
-099import 
org.apache.hadoop.util.StringUtils;
-100import 
org.apache.yetus.audience.InterfaceAudience;
-101import 
org.apache.yetus.audience.InterfaceStability;
-102import org.slf4j.Logger;
-103import org.slf4j.LoggerFactory;
-104
-105import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-106import 

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4b2cc17/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index ce3d805..d6491d4 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -1526,10 +1526,6 @@
 
addCurrentScanners(List<?
 extends KeyValueScanner>) - Method in class 
org.apache.hadoop.hbase.regionserver.StoreScanner
 
-addDaughter(Connection,
 RegionInfo, ServerName, long) - Static method in class 
org.apache.hadoop.hbase.MetaTableAccessor
-
-Adds a daughter region entry to meta.
-
 addDaughtersToPut(Put,
 RegionInfo, RegionInfo) - Static method in class 
org.apache.hadoop.hbase.MetaTableAccessor
 
 Adds split daughters to the Put
@@ -2144,18 +2140,6 @@
 
 Adds a hbase:meta row for the specified new region.
 
-addRegionToMeta(Table,
 RegionInfo) - Static method in class org.apache.hadoop.hbase.MetaTableAccessor
-
-Adds a hbase:meta row for the specified new region to the 
given catalog table.
-
-addRegionToMeta(Table,
 RegionInfo, RegionInfo, RegionInfo) - Static method in class 
org.apache.hadoop.hbase.MetaTableAccessor
-
-Adds a (single) hbase:meta row for the specified new region 
and its daughters.
-
-addRegionToMeta(Connection,
 RegionInfo, RegionInfo, RegionInfo) - Static method in class 
org.apache.hadoop.hbase.MetaTableAccessor
-
-Adds a (single) hbase:meta row for the specified new region 
and its daughters.
-
 addRegionToRemove(RegionInfo)
 - Method in class org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper.RestoreMetaChanges
 
 addRegionToRestore(RegionInfo)
 - Method in class org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper.RestoreMetaChanges
@@ -2370,6 +2354,10 @@
 
 Add sources for the given peer cluster on this region 
server.
 
+addSpiltsToParent(Connection,
 RegionInfo, RegionInfo, RegionInfo) - Static method in class 
org.apache.hadoop.hbase.MetaTableAccessor
+
+Adds daughter region infos to hbase:meta row for the 
specified region.
+
 addSplit(long,
 long) - Method in class org.apache.hadoop.hbase.master.MetricsMasterFileSystem
 
 Record a single instance of a split
@@ -2975,6 +2963,10 @@
 
 Create a Append operation for the specified row.
 
+Append(byte[],
 long, NavigableMap<byte[], List<Cell>>) - Constructor 
for class org.apache.hadoop.hbase.client.Append
+
+Construct the Append with user defined data.
+
 append(Append)
 - Method in interface org.apache.hadoop.hbase.client.AsyncTable
 
 Appends values to one or more columns within a single 
row.
@@ -4899,9 +4891,9 @@
 
 backupMasterAddressesZNode
 - Variable in class org.apache.hadoop.hbase.zookeeper.ZNodePaths
 
-backupMasters
 - Variable in class org.apache.hadoop.hbase.ClusterStatus
+backupMasterNames
 - Variable in class org.apache.hadoop.hbase.ClusterMetricsBuilder
 
-backupMasters
 - Variable in class org.apache.hadoop.hbase.ClusterStatus.Builder
+backupMasterNames
 - Variable in class org.apache.hadoop.hbase.ClusterMetricsBuilder.ClusterMetricsImpl
 
 BackupMasterStatusTmpl - Class in org.apache.hadoop.hbase.tmpl.master
 
@@ -5197,9 +5189,9 @@
 
 balancerChore
 - Variable in class org.apache.hadoop.hbase.master.HMaster
 
-balancerOn
 - Variable in class org.apache.hadoop.hbase.ClusterStatus
+balancerOn
 - Variable in class org.apache.hadoop.hbase.ClusterMetricsBuilder
 
-balancerOn
 - Variable in class org.apache.hadoop.hbase.ClusterStatus.Builder
+balancerOn
 - Variable in class org.apache.hadoop.hbase.ClusterMetricsBuilder.ClusterMetricsImpl
 
 BalancerRegionLoad - Class in org.apache.hadoop.hbase.master.balancer
 
@@ -6488,6 +6480,10 @@
 
 This class should not be instantiated.
 
+bloomFilterSize
 - Variable in class org.apache.hadoop.hbase.RegionMetricsBuilder
+
+bloomFilterSize
 - Variable in class org.apache.hadoop.hbase.RegionMetricsBuilder.RegionMetricsImpl
+
 bloomFilterType
 - Variable in class org.apache.hadoop.hbase.regionserver.StoreFileReader
 
 BloomFilterUtil - Class in org.apache.hadoop.hbase.util
@@ -6929,7 +6925,7 @@
 
 build()
 - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
 
-build()
 - Method in class org.apache.hadoop.hbase.ClusterStatus.Builder
+build()
 - Method in class org.apache.hadoop.hbase.ClusterMetricsBuilder
 
 build()
 - Method in interface org.apache.hadoop.hbase.ExtendedCellBuilder
 
@@ -6951,6 +6947,8 @@
 
 build() - 
Method in interface org.apache.hadoop.hbase.RawCellBuilder
 
+build()
 - Method in class org.apache.hadoop.hbase.RegionMetricsBuilder
+
 build()
 - Method in class org.apache.hadoop.hbase.regionserver.CustomizedScanInfoBuilder
 
 build()
 - Method in class org.apache.hadoop.hbase.regionserver.ScannerContext.Builder
@@ -6963,6 +6961,8 @@
 
 build()
 - Method in class org.apache.hadoop.hbase.rest.model.ScannerModel.FilterModel.ByteArrayComparableModel
 
+build()
 - Method in class org.apache.hadoop.hbase.ServerMetricsBuilder
+
 build()
 - Method in class 

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html 
b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
index f8df828..568381d 100644
--- a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
+++ b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CloseableVisitor.html
@@ -103,7 +103,7 @@
 
 
 
-public static interface MetaTableAccessor.CloseableVisitor
+public static interface MetaTableAccessor.CloseableVisitor
 extends MetaTableAccessor.Visitor, Closeable
 Implementations 'visit' a catalog table row but with 
close() at the end.
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html 
b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
index 1bad2b1..7cf80e2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectAllVisitor.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class MetaTableAccessor.CollectAllVisitor
+static class MetaTableAccessor.CollectAllVisitor
 extends MetaTableAccessor.CollectingVisitor<Result>
 Collects all returned.
 
@@ -214,7 +214,7 @@ extends 
 
 CollectAllVisitor
-CollectAllVisitor()
+CollectAllVisitor()
 
 
 
@@ -231,7 +231,7 @@ extends 
 
 add
-void add(Result r)
+void add(Result r)
 
 Specified by:
 add in
 class MetaTableAccessor.CollectingVisitor<Result>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html 
b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
index 0bd778b..c9a475e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.CollectingVisitor.html
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-abstract static class MetaTableAccessor.CollectingVisitor<T>
+abstract static class MetaTableAccessor.CollectingVisitor<T>
 extends Object
 implements MetaTableAccessor.Visitor
 A MetaTableAccessor.Visitor that 
collects content out of passed Result.
@@ -221,7 +221,7 @@ implements 
 
 results
-final List<T> results
+final List<T> results
 
 
 
@@ -238,7 +238,7 @@ implements 
 
 CollectingVisitor
-CollectingVisitor()
+CollectingVisitor()
 
 
 
@@ -255,7 +255,7 @@ implements 
 
 visit
-public boolean visit(Result r)
+public boolean visit(Result r)
   throws IOException
 Description copied from 
interface:MetaTableAccessor.Visitor
 Visit the catalog table row.
@@ -278,7 +278,7 @@ implements 
 
 add
-abstract void add(Result r)
+abstract void add(Result r)
 
 
 
@@ -287,7 +287,7 @@ implements 
 
 getResults
-List<T> getResults()
+List<T> getResults()
 
 Returns:
 Collected results; wait till visits complete to collect all
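
Taken together: CollectingVisitor<T> owns the results list and the visit() loop, and
subclasses implement only add() to decide what to extract from each hbase:meta Result;
CollectAllVisitor is the subclass whose add() stores the whole Result. A sketch of
another such subclass (hypothetical, and placed in the same package because
CollectingVisitor is package-private):

package org.apache.hadoop.hbase; // CollectingVisitor is package-private

import org.apache.hadoop.hbase.client.Result;

// Hypothetical visitor: keeps only the row key of every visited meta row.
// The inherited visit(Result) calls add(r) and returns true to keep scanning;
// getResults() returns the accumulated List<byte[]> afterwards.
class CollectRowKeysVisitor extends MetaTableAccessor.CollectingVisitor<byte[]> {
  @Override
  void add(Result r) {
    this.results.add(r.getRow());
  }
}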

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html 
b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
index 8d07ed9..aa0de43 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.DefaultVisitorBase.html
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public abstract static class 

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4c0cfa5/apidocs/src-html/org/apache/hadoop/hbase/types/DataType.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/types/DataType.html 
b/apidocs/src-html/org/apache/hadoop/hbase/types/DataType.html
index 02a06fc..2908977 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/types/DataType.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/types/DataType.html
@@ -25,9 +25,9 @@
 017 */
 018package org.apache.hadoop.hbase.types;
 019
-020import 
org.apache.yetus.audience.InterfaceAudience;
-021import 
org.apache.hadoop.hbase.util.Order;
-022import 
org.apache.hadoop.hbase.util.PositionedByteRange;
+020import 
org.apache.hadoop.hbase.util.Order;
+021import 
org.apache.hadoop.hbase.util.PositionedByteRange;
+022import 
org.apache.yetus.audience.InterfaceAudience;
 023
 024/**
 025 * p

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c4c0cfa5/apidocs/src-html/org/apache/hadoop/hbase/types/FixedLengthWrapper.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/types/FixedLengthWrapper.html 
b/apidocs/src-html/org/apache/hadoop/hbase/types/FixedLengthWrapper.html
index c4faacb..c955c7c 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/types/FixedLengthWrapper.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/types/FixedLengthWrapper.html
@@ -25,10 +25,10 @@
 017 */
 018package org.apache.hadoop.hbase.types;
 019
-020import 
org.apache.yetus.audience.InterfaceAudience;
-021import 
org.apache.hadoop.hbase.util.Order;
-022import 
org.apache.hadoop.hbase.util.PositionedByteRange;
-023import 
org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
+020import 
org.apache.hadoop.hbase.util.Order;
+021import 
org.apache.hadoop.hbase.util.PositionedByteRange;
+022import 
org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
+023import 
org.apache.yetus.audience.InterfaceAudience;
 024
 025/**
 026 * Wraps an existing {@link DataType} 
implementation as a fixed-length
@@ -55,62 +55,78 @@
 047  /**
 048   * Retrieve the maximum length (in 
bytes) of encoded values.
 049   */
-050  public int getLength() { return length; 
}
-051
-052  @Override
-053  public boolean isOrderPreserving() { 
return base.isOrderPreserving(); }
-054
-055  @Override
-056  public Order getOrder() { return 
base.getOrder(); }
-057
-058  @Override
-059  public boolean isNullable() { return 
base.isNullable(); }
-060
-061  @Override
-062  public boolean isSkippable() { return 
true; }
+050  public int getLength() {
+051return length;
+052  }
+053
+054  @Override
+055  public boolean isOrderPreserving() {
+056return base.isOrderPreserving();
+057  }
+058
+059  @Override
+060  public Order getOrder() {
+061return base.getOrder();
+062  }
 063
 064  @Override
-065  public int encodedLength(T val) { 
return length; }
-066
-067  @Override
-068  public Class<T> encodedClass() { 
return base.encodedClass(); }
-069
-070  @Override
-071  public int skip(PositionedByteRange 
src) {
-072src.setPosition(src.getPosition() + 
this.length);
-073return this.length;
-074  }
-075
-076  @Override
-077  public T decode(PositionedByteRange 
src) {
-078if (src.getRemaining() < length) {
-079  throw new 
IllegalArgumentException("Not enough buffer remaining. src.offset: "
-080  + src.getOffset() + " 
src.length: " + src.getLength() + " src.position: "
-081  + src.getPosition() + " max 
length: " + length);
-082}
-083// create a copy range limited to 
length bytes. boo.
-084PositionedByteRange b = new 
SimplePositionedMutableByteRange(length);
-085src.get(b.getBytes());
-086return base.decode(b);
-087  }
-088
-089  @Override
-090  public int encode(PositionedByteRange 
dst, T val) {
-091if (dst.getRemaining() < length) {
-092  throw new 
IllegalArgumentException("Not enough buffer remaining. dst.offset: "
-093  + dst.getOffset() + " 
dst.length: " + dst.getLength() + " dst.position: "
-094  + dst.getPosition() + " max 
length: " + length);
-095}
-096int written = base.encode(dst, 
val);
-097if (written > length) {
-098  throw new 
IllegalArgumentException("Length of encoded value (" + written
-099  + ") exceeds max length (" + 
length + ").");
-100}
-101// TODO: is the zero-padding 
appropriate?
-102for (; written < length; 
written++) { dst.put((byte) 0x00); }
-103return written;
-104  }
-105}
+065  public boolean isNullable() {
+066return base.isNullable();
+067  }
+068
+069  @Override
+070  public boolean isSkippable() {
+071return true;
+072  }
+073
+074  @Override
+075  public int encodedLength(T val) {
+076return length;
+077  }
+078
+079  @Override
+080  public Class<T> encodedClass() {
+081return base.encodedClass();
+082  }
+083
+084  @Override
+085  public int skip(PositionedByteRange 
src) 
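
As the restructured methods above show, FixedLengthWrapper delegates ordering and
nullability to the wrapped type while pinning every encoding to exactly length bytes:
encode() zero-pads short values and throws when the encoding overflows, and decode()
always consumes length bytes. A usage sketch under those semantics; wrapping RawString
at 8 bytes is an assumption for illustration:

import org.apache.hadoop.hbase.types.FixedLengthWrapper;
import org.apache.hadoop.hbase.types.RawString;
import org.apache.hadoop.hbase.util.Order;
import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

public class FixedLengthSketch {
  public static void main(String[] args) {
    // Every value now encodes to exactly 8 bytes: short values are padded with
    // 0x00, values whose encoding exceeds 8 bytes throw IllegalArgumentException.
    FixedLengthWrapper<String> type =
        new FixedLengthWrapper<>(new RawString(Order.ASCENDING), 8);
    PositionedByteRange buf = new SimplePositionedMutableByteRange(8);
    type.encode(buf, "abc");               // 3 payload bytes + 5 pad bytes
    System.out.println(buf.getPosition()); // 8 -- always the fixed length
    buf.setPosition(0);
    // Note the TODO in the source: decode() hands all 8 bytes to the wrapped
    // type, so RawString keeps the trailing NUL padding in the result.
    System.out.println(type.decode(buf).length()); // 8, not 3
  }
}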

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/071f974b/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
index 0b8baa8..c77170b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.html
@@ -30,308 +30,325 @@
 022import org.apache.commons.logging.Log;
 023import 
org.apache.commons.logging.LogFactory;
 024import 
org.apache.hadoop.conf.Configured;
-025import 
org.apache.hadoop.hbase.TableName;
-026import 
org.apache.hadoop.hbase.client.Connection;
-027import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-028import 
org.apache.hadoop.hbase.client.Put;
-029import 
org.apache.hadoop.hbase.client.RegionLocator;
-030import 
org.apache.hadoop.hbase.client.Result;
-031import 
org.apache.hadoop.hbase.client.ResultScanner;
-032import 
org.apache.hadoop.hbase.client.Scan;
-033import 
org.apache.hadoop.hbase.client.Table;
-034import 
org.apache.hadoop.hbase.filter.KeyOnlyFilter;
-035import 
org.apache.hadoop.hbase.util.Bytes;
-036import org.apache.hadoop.util.Tool;
-037import 
org.apache.hadoop.util.ToolRunner;
-038
-039import java.io.IOException;
-040import java.util.ArrayList;
-041import java.util.List;
-042import java.util.concurrent.Callable;
-043import 
java.util.concurrent.ExecutorService;
-044import java.util.concurrent.Executors;
-045import 
java.util.concurrent.ForkJoinPool;
-046import java.util.concurrent.Future;
-047import 
java.util.concurrent.ThreadFactory;
-048import 
java.util.concurrent.ThreadLocalRandom;
-049import java.util.concurrent.TimeUnit;
-050
-051
-052/**
-053 * Example of how to use HBase's {@link 
Connection} and {@link Table} in a
-054 * multi-threaded environment. Each table 
is a lightweight object
-055 * that is created and thrown away. 
Connections are heavyweight objects
-056 * that hold on to ZooKeeper connections, 
async processes, and other state.
-057 *
-058 * pre
-059 * Usage:
-060 * bin/hbase 
org.apache.hadoop.hbase.client.example.MultiThreadedClientExample testTableName 
50
-061 * /pre
-062 *
-063 * p
-064 * The table should already be created 
before running the command.
-065 * This example expects one column family 
named d.
-066 * /p
-067 * p
-068 * This is meant to show different 
operations that are likely to be
-069 * done in a real world application. 
These operations are:
-070 * /p
-071 *
-072 * ul
-073 *   li
-074 * 30% of all operations performed 
are batch writes.
-075 * 30 puts are created and sent out 
at a time.
-076 * The response for all puts is 
waited on.
-077 *   /li
-078 *   li
-079 * 20% of all operations are single 
writes.
-080 * A single put is sent out and the 
response is waited for.
-081 *   /li
-082 *   li
-083 * 50% of all operations are scans.
-084 * These scans start at a random 
place and scan up to 100 rows.
-085 *   /li
-086 * /ul
-087 *
-088 */
-089public class MultiThreadedClientExample 
extends Configured implements Tool {
-090  private static final Log LOG = 
LogFactory.getLog(MultiThreadedClientExample.class);
-091  private static final int 
DEFAULT_NUM_OPERATIONS = 50;
-092
-093  /**
-094   * The name of the column family.
-095   *
-096   * d for default.
-097   */
-098  private static final byte[] FAMILY = 
Bytes.toBytes("d");
-099
-100  /**
-101   * For the example we're just using one 
qualifier.
-102   */
-103  private static final byte[] QUAL = 
Bytes.toBytes("test");
-104
-105  private final ExecutorService 
internalPool;
-106
-107  private final int threads;
-108
-109  public MultiThreadedClientExample() 
throws IOException {
-110// Base number of threads.
-111// This represents the number of 
threads your application has
-112// that can be interacting with an 
HBase client.
-113this.threads = 
Runtime.getRuntime().availableProcessors() * 4;
-114
-115// Daemon threads are great for 
things that get shut down.
-116ThreadFactory threadFactory = new 
ThreadFactoryBuilder()
-117
.setDaemon(true).setNameFormat("internal-pool-%d").build();
-118
-119
-120this.internalPool = 
Executors.newFixedThreadPool(threads, threadFactory);
-121  }
+025import 
org.apache.hadoop.hbase.CellBuilder;
+026import 
org.apache.hadoop.hbase.CellBuilderFactory;
+027import 
org.apache.hadoop.hbase.CellBuilderType;
+028import 
org.apache.hadoop.hbase.TableName;
+029import 
org.apache.hadoop.hbase.client.Connection;
+030import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+031import 
org.apache.hadoop.hbase.client.Put;
+032import 
org.apache.hadoop.hbase.client.RegionLocator;
+033import 
org.apache.hadoop.hbase.client.Result;

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc4e5c85/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
index 7c59e27..c904c56 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.TruncateTableFuture.html
@@ -119,4048 +119,4054 @@
 111import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 112import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
 113import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheRequest;
-114import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-115import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-116import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-117import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-118import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest;
-119import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse;
-120import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest;
-121import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
-122import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-123import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
-124import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
-125import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-126import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-127import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-128import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-129import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
-130import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-131import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-132import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-133import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-134import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-135import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;
-136import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-137import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-138import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-139import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-140import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-141import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-142import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-143import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-144import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-145import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
-146import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
-147import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-148import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-149import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-150import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-151import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-152import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-153import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-154import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-155import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest;
-156import 

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-13 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4abd958d/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
index 4b51605..4195221 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
@@ -267,62 +267,55 @@
 
 
 
-ArrayUtils
-
-A set of array utility functions that return reasonable 
values in cases where an array is
- allocated or if it is null
-
-
-
 AtomicUtils
 
 Utilities related to atomic operations.
 
 
-
+
 AvlUtil
 
 Helper class that allows to create and manipulate an 
AvlTree.
 
 
-
+
 AvlUtil.AvlIterableList
 
 Helper class that allows to create and manipulate a linked 
list of AvlLinkedNodes
 
 
-
+
 AvlUtil.AvlLinkedNodeTNode extends AvlUtil.AvlLinkedNode
 
 This class extends the AvlNode and adds two links that will 
be used in conjunction
  with the AvlIterableList class.
 
 
-
+
 AvlUtil.AvlNodeTNode 
extends AvlUtil.AvlNode
 
 This class represent a node that will be used in an 
AvlTree.
 
 
-
+
 AvlUtil.AvlTree
 
 Helper class that allows to create and manipulate an AVL 
Tree
 
 
-
+
 AvlUtil.AvlTreeIteratorTNode extends AvlUtil.AvlNode
 
 Iterator for the AvlTree
 
 
-
+
 Base64
 
 Encodes and decodes to and from Base64 notation.
 
 
-
+
 Base64.Base64InputStream
 
 A Base64.Base64InputStream will 
read data from another
@@ -330,7 +323,7 @@
  encode/decode to/from Base64 notation on the fly.
 
 
-
+
 Base64.Base64OutputStream
 
 A Base64.Base64OutputStream will 
write data to another
@@ -338,80 +331,80 @@
  encode/decode to/from Base64 notation on the fly.
 
 
-
+
 BloomContext
 
 The bloom context that is used by the StorefileWriter to 
add the bloom details
  per cell
 
 
-
+
 BloomFilterChunk
 
 The basic building block for the CompoundBloomFilter
 
 
-
+
 BloomFilterFactory
 
 Handles Bloom filter initialization based on configuration 
and serialized metadata in the reader
  and writer of HStoreFile.
 
 
-
+
 BloomFilterUtil
 
 Utility methods related to BloomFilters
 
 
-
+
 BoundedCompletionServiceV
 
A completion service, close to the one available in JDK 1.7.
 However, this one keeps the list of the futures, and allows cancelling them all.
 
 
-
+
 BoundedPriorityBlockingQueueE
 
 A generic bounded blocking Priority-Queue.
 
 
-
+
 BoundedPriorityBlockingQueue.PriorityQueueE
 
 
-
+
 ByteArrayHashKey
 
 
-
+
 ByteBufferArray
 
This class manages an array of ByteBuffers with a default 
 size of 4MB.
 
 
-
+
 ByteBufferArray.BufferCreatorCallable
 
 A callable that creates buffers of the specified length 
either onheap/offheap using the
  ByteBufferAllocator
 
 
-
+
 ByteBufferUtils
 
 Utility functions for working with byte buffers, such as 
reading/writing
  variable-length long numbers.
 
 
-
+
 ByteRangeUtils
 
 Utility methods for working with ByteRange.
 
 
-
+
 Bytes
 
 Utility class that handles byte arrays, conversions to/from 
other types,
@@ -419,50 +412,50 @@
  HashSets, and can be used as key in maps or trees.
 
 
-
+
 Bytes.ByteArrayComparator
 
 Byte array comparator class.
 
 
-
+
 Bytes.LexicographicalComparerHolder
 
 Provides a lexicographical comparer implementation; either 
a Java
  implementation or a faster implementation based on Unsafe.
 
 
-
+
 Bytes.RowEndKeyComparator
 
 A Bytes.ByteArrayComparator that 
treats the empty array as the largest value.
 
 
-
+
 ByteStringer
 
 Hack to workaround HBASE-10304 issue that keeps bubbling up 
when a mapreduce context.
 
 
-
+
 CellHashKey
 
 Extracts the byte for the hash calculation from the given 
cell
 
 
-
+
 Classes
 
 Utilities for class manipulation.
 
 
-
+
 ClassLoaderBase
 
Base class loader that defines a couple of shared constants used 
 by sub-classes.
 
 
-
+
 ClassSize
 
 Class for determining the "size" of a class, an attempt to 
calculate the
@@ -471,13 +464,13 @@
  The core of this class is taken from the Derby project
 
 
-
+
 ClassSize.MemoryLayout
 
 MemoryLayout abstracts details about the JVM object 
layout.
 
 
-
+
 ClassSize.UnsafeLayout
 
 UnsafeLayout uses Unsafe to guesstimate the object-layout 
related parameters like object header
@@ -485,400 +478,400 @@
  See HBASE-15950.
 
 
-
+
 ClassSize.UnsafeLayout.HeaderSize
 
 
-
+
 CollectionBackedScanner
 
 Utility scanner that wraps a sortable collection and serves 
as a KeyValueScanner.
 
 
-
+
 CollectionUtils
 
 Utility methods for dealing with Collections, including 
treating null collections as empty.
 
 
-
+
 CommonFSUtils
 
 Utility methods for interacting with the underlying file 
system.
 
 
-
+
 CommonFSUtils.StreamCapabilities
 
 
-
+
 CompressionTest
 
 Compression validation test.
 
 
-
+
 ConcatenatedListsT
 
 A collection class that contains multiple sub-lists, which 
allows us to not copy lists.
 
 
-
+
 

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e23b49ba/devapidocs/org/apache/hadoop/hbase/client/Connection.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Connection.html 
b/devapidocs/org/apache/hadoop/hbase/client/Connection.html
index e4a7797..2885fe8 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Connection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Connection.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 All Known Implementing Classes:
-ConnectionImplementation, 
ConnectionUtils.MasterlessConnection, ConnectionUtils.ShortCircuitingClusterConnection
+ConnectionImplementation, 
ConnectionUtils.MasterlessConnection, ConnectionUtils.ShortCircuitingClusterConnection,
 SharedConnection
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e23b49ba/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html
index 175e2f3..129feec 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHColumnDescriptor.html
@@ -200,7 +200,7 @@ extends 
 
 Methods inherited from classorg.apache.hadoop.hbase.HColumnDescriptor
-compareTo,
 equals,
 getBlocksize,
 getBloomFilterType,
 getCompactionCompression,
 getCompactionCompressionType,
 getCompression,
 getCompressionType,
 getConfiguration,
 getConfigurationValue,
 getDataBlockEncoding,
 getDefaultValues,
 getDFSReplication,
 getEncryptionKey,
 getEncryptionType,
 getInMemoryCompaction,
 getKeepDeletedCells, getMaxVersions,
 getMinVersions,
 getMobCompactPartitionPolicy,
 getMobThreshold,
 getName,
 getNameAsString,
 getScope,
 getStoragePolicy,
 getTimeToLive, getUnit,
 getValue,
 getValue,
 getValue,
 getValues,
 hashCode,
 isBlockCacheEnabled,
 isCacheBloomsOnWrite,
 isCacheDataInL1, isCacheDataOnWrite,
 isCacheIndexesOnWrite,
 isCompressTags,
 isEvictBlocksOnClose,
 isInMemory,
 isLegalFamilyName,
 isMobEnabled,
 isNewVersionBehavior,
 isPrefetchBlocksOnOpen,
 parseFrom,
 remove,
 removeConfiguration,
 setBlockCacheEnabled,
 setBlocksize,
 setBloomFilterType,
 setCacheBloomsOnWrite,
 setCacheDataInL1,
 setCacheDataOnWrite,
 setCacheIndexesOnWrite,
 setCompactionCompressionType,
 setCompressionType,
 setCompressTags,
 setConfiguration,
 setDataBlockEncoding,
 setDFSReplication,
 setEncryptionKey,
 setEncryptionType,
 setEvictBlocksOnClose,
 setInMemory,
 setInMemoryCompaction,
 setKeepDeletedCells,
 setMaxVersions,
 setMinVersions,
 setMobCompactPartitionPolicy,
 setMobEnabled,
 setMobThreshold,
 setNewVersionBehavior,
 setPrefetchBlocksOnOpen, setScope,
 setStoragePolicy,
 setTimeToLive,
 setTimeToLive,
 setValue,
 setValue,
 setVersions,
 toByteArray,
 toString, toStringCustomizedValues
+compareTo,
 equals,
 getBlocksize,
 getBloomFilterType,
 getCompactionCompression,
 getCompactionCompressionType,
 getCompression,
 getCompressionType,
 getConfiguration,
 getConfigurationValue,
 getDataBlockEncoding,
 getDefaultValues,
 getDFSReplication,
 getEncryptionKey,
 getEncryptionType,
 getInMemoryCompaction,
 getKeepDeletedCells, getMaxVersions,
 getMinVersions,
 getMobCompactPartitionPolicy,
 getMobThreshold,
 getName,
 getNameAsString,
 getScope,
 getStoragePolicy,
 getTimeToLive, getUnit,
 getValue,
 getValue,
 getValue,
 getValues,
 hashCode,
 isBlockCacheEnabled,
 isCacheBloomsOnWrite,
 isCacheDataOnWrite, isCacheIndexesOnWrite,
 isCompressTags,
 isEvictBlocksOnClose,
 isInMemory,
 isLegalFamilyName,
 isMobEnabled,
 isNewVersionBehavior,
 isPrefetchBlocksOnOpen,
 parseFrom,
 remove,
 removeConfiguration,
 setBlockCacheEnabled,
 setBlocksize,
 setBloomFilterType,
 setCacheBloomsOnWrite,
 setCacheDataInL1,
 setCacheDataOnWrite,
 setCacheIndexesOnWrite,
 setCompactionCompressionType,
 setCompressionType,
 setCompressTags,
 setConfiguration,
 setDataBlockEncoding, setDFSReplication,
 setEncryptionKey,
 setEncryptionType,
 setEvictBlocksOnClose,
 setInMemory,
 setInMemoryCompaction,
 setKeepDeletedCells,
 setMaxVersions,
 setMinVersions,
 setMobCompactPartitionPolicy,
 setMobEnabled,
 setMobThreshold,
 setNewVersionBehavior,
 setPrefetchBlocksOnOpen,
 setScope,
 setStoragePolicy,
 setTimeToLive,
 setTimeToLive,
 setValue,
 setValue,
 setVersions,
 toByteArray,
 toString,
 toStringCustomizedValues
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e23b49ba/devapidocs/org/apache/hadoop/hbase/client/MasterCallable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/MasterCallable.html 
b/devapidocs/org/apache/hadoop/hbase/client/MasterCallable.html
index 641523e..47be427 100644
--- 

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/dependency-info.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/dependency-info.html 
b/hbase-build-configuration/hbase-archetypes/dependency-info.html
index 5d94285..94a5728 100644
--- a/hbase-build-configuration/hbase-archetypes/dependency-info.html
+++ b/hbase-build-configuration/hbase-archetypes/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetypes  Dependency Information
 
@@ -148,7 +148,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/dependency-management.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/dependency-management.html 
b/hbase-build-configuration/hbase-archetypes/dependency-management.html
index 324589b..f7f3c3e 100644
--- a/hbase-build-configuration/hbase-archetypes/dependency-management.html
+++ b/hbase-build-configuration/hbase-archetypes/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetypes  Project Dependency 
Management
 
@@ -775,18 +775,24 @@
 test-jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+test-jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.bouncycastle
 http://www.bouncycastle.org/java.html;>bcprov-jdk16
 1.46
 jar
 http://www.bouncycastle.org/licence.html;>Bouncy Castle 
Licence
-
+
 org.hamcrest
 https://github.com/hamcrest/JavaHamcrest/hamcrest-core;>hamcrest-core
 1.3
 jar
 http://www.opensource.org/licenses/bsd-license.php;>New BSD 
License
-
+
 org.mockito
 http://mockito.org;>mockito-core
 2.1.0
@@ -804,7 +810,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
index 3f41509..e3aa300 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetype builder  Project 
Dependencies
 
@@ -330,7 +330,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 



[12/51] [partial] hbase-site git commit: Published site at .

2017-12-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c54c242b/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.html
new file mode 100644
index 000..6c7ef2a
--- /dev/null
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.html
@@ -0,0 +1,419 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/**
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package 
org.apache.hadoop.hbase.zookeeper;
+019
+020import static 
org.apache.hadoop.hbase.HConstants.DEFAULT_ZK_SESSION_TIMEOUT;
+021import static 
org.apache.hadoop.hbase.HConstants.ZK_SESSION_TIMEOUT;
+022
+023import java.io.Closeable;
+024import java.io.IOException;
+025import java.util.Arrays;
+026import java.util.EnumSet;
+027import 
java.util.concurrent.CompletableFuture;
+028import java.util.concurrent.DelayQueue;
+029import java.util.concurrent.Delayed;
+030import java.util.concurrent.TimeUnit;
+031import 
java.util.concurrent.atomic.AtomicBoolean;
+032
+033import org.apache.commons.logging.Log;
+034import 
org.apache.commons.logging.LogFactory;
+035import 
org.apache.hadoop.conf.Configuration;
+036import 
org.apache.yetus.audience.InterfaceAudience;
+037import 
org.apache.zookeeper.KeeperException;
+038import 
org.apache.zookeeper.KeeperException.Code;
+039import org.apache.zookeeper.ZooKeeper;
+040import org.apache.zookeeper.data.Stat;
+041
+042import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+043
+044/**
+045 * A very simple read-only ZooKeeper client 
implementation without watcher support.
+046 */
+047@InterfaceAudience.Private
+048public final class ReadOnlyZKClient 
implements Closeable {
+049
+050  private static final Log LOG = 
LogFactory.getLog(ReadOnlyZKClient.class);
+051
+052  public static final String 
RECOVERY_RETRY = "zookeeper.recovery.retry";
+053
+054  private static final int 
DEFAULT_RECOVERY_RETRY = 30;
+055
+056  public static final String 
RECOVERY_RETRY_INTERVAL_MILLIS =
+057  
"zookeeper.recovery.retry.intervalmill";
+058
+059  private static final int 
DEFAULT_RECOVERY_RETRY_INTERVAL_MILLIS = 1000;
+060
+061  public static final String 
KEEPALIVE_MILLIS = "zookeeper.keep-alive.time";
+062
+063  private static final int 
DEFAULT_KEEPALIVE_MILLIS = 60000;
+064
+065  private static final 
EnumSet<Code> FAIL_FAST_CODES = EnumSet.of(Code.NOAUTH, 
Code.AUTHFAILED);
+066
+067  private final String connectString;
+068
+069  private final int sessionTimeoutMs;
+070
+071  private final int maxRetries;
+072
+073  private final int retryIntervalMs;
+074
+075  private final int keepAliveTimeMs;
+076
+077  private static abstract class Task 
implements Delayed {
+078
+079protected long time = 
System.nanoTime();
+080
+081public boolean needZk() {
+082  return false;
+083}
+084
+085public void exec(ZooKeeper zk) {
+086}
+087
+088public void connectFailed(IOException 
e) {
+089}
+090
+091public void closed(IOException e) {
+092}
+093
+094@Override
+095public int compareTo(Delayed o) {
+096  Task that = (Task) o;
+097  int c = Long.compare(time, 
that.time);
+098  if (c != 0) {
+099return c;
+100  }
+101  return 
Integer.compare(System.identityHashCode(this), 
System.identityHashCode(that));
+102}
+103
+104@Override
+105public long getDelay(TimeUnit unit) 
{
+106  return unit.convert(time - 
System.nanoTime(), TimeUnit.NANOSECONDS);
+107}
+108  }
+109
+110  private static final Task CLOSE = new 
Task() {
+111  };
+112
+113  private final DelayQueue<Task> 
tasks = new DelayQueue<>();
+114
+115  private final AtomicBoolean closed = 
new AtomicBoolean(false);
+116
+117  private ZooKeeper zookeeper;
+118
+119  private String getId() {
+120return String.format("0x%08x", 
System.identityHashCode(this));
+121  }
+122
+123  public ReadOnlyZKClient(Configuration 
conf) {
+124this.connectString = 
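
The constructor shown above pulls its connect string, session timeout, retry count,
retry interval and keep-alive time from the Configuration keys declared earlier in the
class. A construction sketch; the async get() accessor and the znode path are
assumptions not visible in this fragment:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.zookeeper.ReadOnlyZKClient;

public class ReadOnlyZkSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Tune the knobs the constructor reads (defaults: 30 retries, 1000 ms).
    conf.setInt(ReadOnlyZKClient.RECOVERY_RETRY, 10);
    conf.setInt(ReadOnlyZKClient.RECOVERY_RETRY_INTERVAL_MILLIS, 500);
    // Closeable: close() enqueues the CLOSE task and shuts the client down.
    try (ReadOnlyZKClient zk = new ReadOnlyZKClient(conf)) {
      // Assumed accessor: an async read returning CompletableFuture<byte[]>.
      byte[] data = zk.get("/hbase/meta-region-server").get();
      System.out.println(data == null ? 0 : data.length);
    }
  }
}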

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/713d773f/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
index 25e368d..d0f781f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
@@ -25,798 +25,798 @@
 017 */
 018package 
org.apache.hadoop.hbase.io.asyncfs;
 019
-020import static 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
-021import static 
org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleState.READER_IDLE;
-022import static 
org.apache.hadoop.fs.CreateFlag.CREATE;
-023import static 
org.apache.hadoop.fs.CreateFlag.OVERWRITE;
-024import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;
-025import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;
+020import static 
org.apache.hadoop.fs.CreateFlag.CREATE;
+021import static 
org.apache.hadoop.fs.CreateFlag.OVERWRITE;
+022import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;
+023import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;
+024import static 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
+025import static 
org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleState.READER_IDLE;
 026import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 027import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME;
 028import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;
 029import static 
org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;
 030
-031import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
-032import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
-033import 
com.google.protobuf.CodedOutputStream;
-034
-035import 
org.apache.hadoop.hbase.shaded.io.netty.bootstrap.Bootstrap;
-036import 
org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBuf;
-037import 
org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBufAllocator;
-038import 
org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBufOutputStream;
-039import 
org.apache.hadoop.hbase.shaded.io.netty.buffer.PooledByteBufAllocator;
-040import 
org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
-041import 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelFuture;
-042import 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelFutureListener;
-043import 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandler;
-044import 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext;
-045import 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInitializer;
-046import 
org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelPipeline;
-047import 
org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop;
-048import 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHandler;
-049import 
org.apache.hadoop.hbase.shaded.io.netty.handler.codec.protobuf.ProtobufDecoder;
-050import 
org.apache.hadoop.hbase.shaded.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-051import 
org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleStateEvent;
-052import 
org.apache.hadoop.hbase.shaded.io.netty.handler.timeout.IdleStateHandler;
-053import 
org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Future;
-054import 
org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.FutureListener;
-055import 
org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise;
-056
-057import java.io.IOException;
-058import 
java.lang.reflect.InvocationTargetException;
-059import java.lang.reflect.Method;
-060import java.util.ArrayList;
-061import java.util.EnumSet;
-062import java.util.List;
-063import java.util.concurrent.TimeUnit;
-064
-065import org.apache.commons.logging.Log;
-066import 
org.apache.commons.logging.LogFactory;
-067import 
org.apache.hadoop.conf.Configuration;
-068import 
org.apache.hadoop.crypto.CryptoProtocolVersion;
-069import 
org.apache.hadoop.crypto.Encryptor;
-070import org.apache.hadoop.fs.CreateFlag;
-071import org.apache.hadoop.fs.FileSystem;
-072import 
org.apache.hadoop.fs.FileSystemLinkResolver;
-073import org.apache.hadoop.fs.Path;
-074import 
org.apache.hadoop.fs.UnresolvedLinkException;
-075import 

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fd365a2b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html
index d438f22..7c59e27 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html
@@ -1290,8 +1290,8 @@
 1282   CompactType 
compactType) throws IOException {
 1283switch (compactType) {
 1284  case MOB:
-1285
compact(this.connection.getAdminForMaster(), getMobRegionInfo(tableName), 
major,
-1286  columnFamily);
+1285
compact(this.connection.getAdminForMaster(), 
RegionInfo.createMobRegionInfo(tableName),
+1286major, columnFamily);
 1287break;
 1288  case NORMAL:
 1289checkTableExists(tableName);
@@ -3248,7 +3248,7 @@
 3240  new Callable<AdminProtos.GetRegionInfoResponse.CompactionState>() {
 3241@Override
 3242public 
AdminProtos.GetRegionInfoResponse.CompactionState call() throws Exception {
-3243  RegionInfo info = 
getMobRegionInfo(tableName);
+3243  RegionInfo info = 
RegionInfo.createMobRegionInfo(tableName);
 3244  GetRegionInfoRequest 
request =
 3245
RequestConverter.buildGetRegionInfoRequest(info.getRegionName(), true);
 3246  GetRegionInfoResponse 
response = masterAdmin.getRegionInfo(rpcController, request);
@@ -3312,7 +3312,7 @@
 3304}
 3305break;
 3306  default:
-3307throw new 
IllegalArgumentException("Unknowne compactType: " + compactType);
+3307throw new 
IllegalArgumentException("Unknown compactType: " + compactType);
 3308}
 3309if (state != null) {
 3310  return 
ProtobufUtil.createCompactionState(state);
@@ -3847,325 +3847,320 @@
 3839});
 3840  }
 3841
-3842  private RegionInfo 
getMobRegionInfo(TableName tableName) {
-3843return 
RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes(".mob")).setRegionId(0)
-3844.build();
-3845  }
-3846
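The hunks above swap HBaseAdmin's private getMobRegionInfo for the shared RegionInfo.createMobRegionInfo. Per the removed helper, the MOB "region" is a synthetic RegionInfo (start key ".mob", region id 0) that lets per-table MOB data be addressed through region-shaped admin APIs. A hedged sketch of the call-site shape after the change (surrounding admin plumbing elided):

    // Both the compact() and getCompactionState() paths now build the
    // synthetic MOB region the same way, via the public factory:
    RegionInfo mobRegion = RegionInfo.createMobRegionInfo(tableName);
    compact(this.connection.getAdminForMaster(), mobRegion, major, columnFamily);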
-3847  private RpcControllerFactory 
getRpcControllerFactory() {
-3848return this.rpcControllerFactory;
-3849  }
-3850
-3851  @Override
-3852  public void addReplicationPeer(String 
peerId, ReplicationPeerConfig peerConfig, boolean enabled)
-3853  throws IOException {
-3854executeCallable(new 
MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-3855  @Override
-3856  protected Void rpcCall() throws 
Exception {
-3857
master.addReplicationPeer(getRpcController(),
-3858  
RequestConverter.buildAddReplicationPeerRequest(peerId, peerConfig, 
enabled));
-3859return null;
-3860  }
-3861});
-3862  }
-3863
-3864  @Override
-3865  public void 
removeReplicationPeer(String peerId) throws IOException {
-3866executeCallable(new 
MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-3867  @Override
-3868  protected Void rpcCall() throws 
Exception {
-3869
master.removeReplicationPeer(getRpcController(),
-3870  
RequestConverter.buildRemoveReplicationPeerRequest(peerId));
-3871return null;
-3872  }
-3873});
-3874  }
-3875
-3876  @Override
-3877  public void 
enableReplicationPeer(final String peerId) throws IOException {
-3878executeCallable(new 
MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-3879  @Override
-3880  protected Void rpcCall() throws 
Exception {
-3881
master.enableReplicationPeer(getRpcController(),
-3882  
RequestConverter.buildEnableReplicationPeerRequest(peerId));
-3883return null;
-3884  }
-3885});
-3886  }
-3887
-3888  @Override
-3889  public void 
disableReplicationPeer(final String peerId) throws IOException {
-3890executeCallable(new 
MasterCallable<Void>(getConnection(), getRpcControllerFactory()) {
-3891  @Override
-3892  protected Void rpcCall() throws 
Exception {
-3893
master.disableReplicationPeer(getRpcController(),
-3894  
RequestConverter.buildDisableReplicationPeerRequest(peerId));
-3895return null;
-3896  }
-3897});
-3898  }
-3899
-3900  @Override
-3901  public ReplicationPeerConfig 
getReplicationPeerConfig(final String peerId) throws IOException {
-3902return executeCallable(new 
MasterCallable<ReplicationPeerConfig>(getConnection(),
-3903getRpcControllerFactory()) {
-3904  @Override
-3905  protected ReplicationPeerConfig 
rpcCall() throws Exception {
-3906GetReplicationPeerConfigResponse 
response = master.getReplicationPeerConfig(
-3907  getRpcController(), 

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b9722a17/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
index 29ea7b3..6ed75c9 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
@@ -1313,7093 +1313,7082 @@
 1305
 1306  @Override
 1307  public boolean isSplittable() {
-1308boolean result = isAvailable() && !hasReferences();
-1309LOG.info("ASKED IF SPLITTABLE " + 
result + " " + getRegionInfo().getShortNameToLog(),
-1310  new Throwable("LOGGING: 
REMOVE"));
-1311// REMOVE BELOW
-1312LOG.info("DEBUG LIST ALL FILES");
-1313for (HStore store : 
this.stores.values()) {
-1314  LOG.info("store " + 
store.getColumnFamilyName());
-1315  for (HStoreFile sf : 
store.getStorefiles()) {
-1316
LOG.info(sf.toStringDetailed());
-1317  }
-1318}
-1319return result;
-1320  }
-1321
-1322  @Override
-1323  public boolean isMergeable() {
-1324if (!isAvailable()) {
-1325  LOG.debug("Region " + this
-1326  + " is not mergeable because 
it is closing or closed");
-1327  return false;
-1328}
-1329if (hasReferences()) {
-1330  LOG.debug("Region " + this
-1331  + " is not mergeable because 
it has references");
-1332  return false;
-1333}
-1334
-1335return true;
+1308return isAvailable() && !hasReferences();
+1309  }
+1310
+1311  @Override
+1312  public boolean isMergeable() {
+1313if (!isAvailable()) {
+1314  LOG.debug("Region " + this
+1315  + " is not mergeable because 
it is closing or closed");
+1316  return false;
+1317}
+1318if (hasReferences()) {
+1319  LOG.debug("Region " + this
+1320  + " is not mergeable because 
it has references");
+1321  return false;
+1322}
+1323
+1324return true;
+1325  }
+1326
+1327  public boolean areWritesEnabled() {
+1328synchronized(this.writestate) {
+1329  return 
this.writestate.writesEnabled;
+1330}
+1331  }
+1332
+1333  @VisibleForTesting
+1334  public MultiVersionConcurrencyControl 
getMVCC() {
+1335return mvcc;
 1336  }
 1337
-1338  public boolean areWritesEnabled() {
-1339synchronized(this.writestate) {
-1340  return 
this.writestate.writesEnabled;
-1341}
-1342  }
-1343
-1344  @VisibleForTesting
-1345  public MultiVersionConcurrencyControl 
getMVCC() {
-1346return mvcc;
-1347  }
-1348
-1349  @Override
-1350  public long getMaxFlushedSeqId() {
-1351return maxFlushedSeqId;
+1338  @Override
+1339  public long getMaxFlushedSeqId() {
+1340return maxFlushedSeqId;
+1341  }
+1342
+1343  /**
+1344   * @return readpoint considering given 
IsolationLevel. Pass {@code null} for default
+1345   */
+1346  public long 
getReadPoint(IsolationLevel isolationLevel) {
+1347if (isolationLevel != null && isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
+1348  // This scan can read even 
uncommitted transactions
+1349  return Long.MAX_VALUE;
+1350}
+1351return mvcc.getReadPoint();
 1352  }
 1353
-1354  /**
-1355   * @return readpoint considering given 
IsolationLevel. Pass {@code null} for default
-1356   */
-1357  public long 
getReadPoint(IsolationLevel isolationLevel) {
-1358if (isolationLevel != null && isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
-1359  // This scan can read even 
uncommitted transactions
-1360  return Long.MAX_VALUE;
-1361}
-1362return mvcc.getReadPoint();
-1363  }
-1364
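The readpoint logic above (unchanged, just moved) is the isolation-level switch for scans: READ_UNCOMMITTED short-circuits to Long.MAX_VALUE so every edit is visible, anything else reads up to the current MVCC point. A minimal self-contained model of that decision (our names, not HBase's):

    enum IsolationLevel { READ_COMMITTED, READ_UNCOMMITTED }

    final class ReadPointChooser {
      private final long mvccReadPoint; // stand-in for mvcc.getReadPoint()

      ReadPointChooser(long mvccReadPoint) { this.mvccReadPoint = mvccReadPoint; }

      long readPoint(IsolationLevel level) {
        // READ_UNCOMMITTED: no upper bound; the scan may see in-flight writes.
        if (level == IsolationLevel.READ_UNCOMMITTED) {
          return Long.MAX_VALUE;
        }
        // Default: only edits sequenced at or below the MVCC read point are visible.
        return mvccReadPoint;
      }
    }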
-1365  public boolean 
isLoadingCfsOnDemandDefault() {
-1366return 
this.isLoadingCfsOnDemandDefault;
-1367  }
-1368
-1369  /**
-1370   * Close down this HRegion.  Flush the 
cache, shut down each HStore, don't
-1371   * service any more calls.
-1372   *
-1373   * <p>This method could take some time to execute, so don't call it from a
-1374   * time-sensitive thread.
-1375   *
-1376   * @return Vector of all the storage 
files that the HRegion's component
-1377   * HStores make use of.  It's a list 
of all StoreFile objects. Returns empty
-1378   * vector if already closed and null 
if judged that it should not close.
-1379   *
-1380   * @throws IOException e
-1381   * @throws DroppedSnapshotException 
Thrown when replay of wal is required
-1382   * because a Snapshot was not properly 
persisted. The region is put in closing mode, and the
-1383   * caller MUST abort after this.
-1384   */
-1385  public Map<byte[], List<HStoreFile>> close() throws IOException {
-1386return close(false);
-1387  }
-1388
-1389  private final Object closeLock = new 
Object();
-1390
-1391  /** Conf key for the periodic flush 
interval */
-1392  public 

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b1eb7453/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
index d98042d..d549086 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
@@ -42,2537 +42,2536 @@
 034
 035import org.apache.commons.logging.Log;
 036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.yetus.audience.InterfaceAudience;
+037import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 038import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
 039import 
org.apache.hadoop.hbase.util.Bytes;
 040import 
org.apache.hadoop.hbase.util.ClassSize;
 041import 
org.apache.hadoop.io.RawComparator;
-042
-043import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-044/**
-045 * An HBase Key/Value. This is the fundamental HBase Type.
-046 * <p>
-047 * HBase applications and users should use the Cell interface and avoid directly using KeyValue and
-048 * member functions not defined in Cell.
-049 * <p>
-050 * If being used client-side, the primary methods to access individual fields are
-051 * {@link #getRowArray()}, {@link #getFamilyArray()}, {@link #getQualifierArray()},
-052 * {@link #getTimestamp()}, and {@link #getValueArray()}. These methods allocate new byte arrays
-053 * and return copies. Avoid their use server-side.
-054 * <p>
-055 * Instances of this class are immutable. They do not implement Comparable but Comparators are
-056 * provided. Comparators change with context, whether user table or a catalog table comparison. It's
-057 * critical you use the appropriate comparator. There are Comparators for normal HFiles, Meta's
-058 * HFiles, and bloom filter keys.
-059 * <p>
-060 * KeyValue wraps a byte array and takes offsets and lengths into the passed array at where to start
-061 * interpreting the content as KeyValue. The KeyValue format inside a byte array is:
-062 * <keylength> <valuelength> <key> <value>. Key is further
-063 * decomposed as: <rowlength> <row> <columnfamilylength>
-064 * <columnfamily> <columnqualifier>
-065 * <timestamp> <keytype>. The rowlength maximum is
-066 * Short.MAX_SIZE, column family length maximum is Byte.MAX_SIZE, and
-067 * column qualifier + key length must be < Integer.MAX_SIZE. The column does not
-068 * contain the family/qualifier delimiter, {@link #COLUMN_FAMILY_DELIMITER}.
-069 * KeyValue can optionally contain Tags. When it contains tags, they are added in the byte array after
-070 * the value part. The format for this part is: <tagslength><tagsbytes>.
-071 * The tagslength maximum is Short.MAX_SIZE. The tagsbytes
-072 * contain one or more tags, whereas each tag is of the form
-073 * <taglength><tagtype><tagbytes>. tagtype is one byte
-074 * and taglength maximum is Short.MAX_SIZE and it includes 1 byte type
-075 * length and actual tag bytes length.
-076 */
-077@InterfaceAudience.Private
-078public class KeyValue implements ExtendedCell {
-079  private static final ArrayList<Tag> EMPTY_ARRAY_LIST = new ArrayList<>();
-080
-081  private static final Log LOG = 
LogFactory.getLog(KeyValue.class);
-082
-083  public static final long FIXED_OVERHEAD 
= ClassSize.OBJECT + // the KeyValue object itself
-084  ClassSize.REFERENCE + // pointer to 
"bytes"
-085  2 * Bytes.SIZEOF_INT + // offset, 
length
-086  Bytes.SIZEOF_LONG;// memstoreTS
-087
-088  /**
-089   * Colon character in UTF-8
-090   */
-091  public static final char 
COLUMN_FAMILY_DELIMITER = ':';
-092
-093  public static final byte[] 
COLUMN_FAMILY_DELIM_ARRAY =
-094new 
byte[]{COLUMN_FAMILY_DELIMITER};
-095
-096  /**
-097   * Comparator for plain key/values; 
i.e. non-catalog table key/values. Works on Key portion
-098   * of KeyValue only.
-099   * @deprecated Use {@link 
CellComparator#getInstance()} instead. Deprecated for hbase 2.0, remove for 
hbase 3.0.
-100   */
-101  @Deprecated
-102  public static final KVComparator 
COMPARATOR = new KVComparator();
-103  /**
-104   * A {@link KVComparator} for 
codehbase:meta/code catalog table
-105   * {@link KeyValue}s.
-106   * @deprecated Use {@link 
CellComparatorImpl#META_COMPARATOR} instead. Deprecated for hbase 2.0, remove 
for hbase 3.0.
-107   */
-108  @Deprecated
-109  public static final KVComparator 
META_COMPARATOR = new MetaComparator();
-110
-111  /** Size of the key length field in 
bytes*/
-112  public static final int KEY_LENGTH_SIZE 
= Bytes.SIZEOF_INT;
-113
-114  
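The framing described in the KeyValue javadoc above can be walked with plain java.nio. A hedged decoder sketch (ours, not HBase's; assumes a well-formed, tag-free KeyValue at the given offset):

    import java.nio.ByteBuffer;

    final class KeyValueLayout {
      // Layout: <keylength:int> <valuelength:int> <key> <value>, where key =
      // <rowlength:short> <row> <cflength:byte> <cf> <qualifier> <timestamp:long> <keytype:byte>
      static void describe(byte[] buf, int offset) {
        ByteBuffer b = ByteBuffer.wrap(buf, offset, buf.length - offset);
        int keyLen = b.getInt();
        int valueLen = b.getInt();
        short rowLen = b.getShort();
        b.position(b.position() + rowLen);   // skip row bytes
        int cfLen = b.get() & 0xff;          // family length is a single byte
        b.position(b.position() + cfLen);    // skip family bytes
        // qualifier = key minus fixed fields: rowlength(2) + row + cflength(1)
        // + family + timestamp(8) + keytype(1)
        int qualLen = keyLen - 2 - rowLen - 1 - cfLen - 8 - 1;
        System.out.printf("keylength=%d valuelength=%d row=%d family=%d qualifier=%d%n",
            keyLen, valueLen, rowLen, cfLen, qualLen);
      }
    }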

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9118853f/testapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.html
--
diff --git a/testapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.html 
b/testapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.html
index c024cb3..7a0a7ba 100644
--- a/testapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.html
+++ b/testapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.html
@@ -4476,7 +4476,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.htm
 
 
 shutdownMiniMapReduceCluster
-public void shutdownMiniMapReduceCluster()
+public void shutdownMiniMapReduceCluster()
 Stops the previously started 
MiniMRCluster.
 
 
@@ -4486,7 +4486,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.htm
 
 
 createMockRegionServerService
-public RegionServerServices createMockRegionServerService()
+public RegionServerServices createMockRegionServerService()
 throws IOException
 Create a stubbed out RegionServerService, mainly for 
getting FS.
 
@@ -4501,7 +4501,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.htm
 
 
 createMockRegionServerService
-public RegionServerServices createMockRegionServerService(RpcServerInterface rpc)
+public RegionServerServices createMockRegionServerService(RpcServerInterface rpc)
 throws IOException
 Create a stubbed out RegionServerService, mainly for 
getting FS.
  This version is used by TestTokenAuthentication
@@ -4517,7 +4517,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.htm
 
 
 createMockRegionServerService
-public RegionServerServices createMockRegionServerService(ServerName name)
+public RegionServerServices createMockRegionServerService(ServerName name)
 throws IOException
 Create a stubbed out RegionServerService, mainly for 
getting FS.
  This version is used by TestOpenRegionHandler
@@ -4533,7 +4533,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.htm
 
 
 enableDebug
-public void enableDebug(Class<?> clazz)
+public void enableDebug(Class<?> clazz)
 Switches the logger for the given class to DEBUG 
level.
 
 Parameters:
@@ -4547,7 +4547,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.htm
 
 
 expireMasterSession
-public void expireMasterSession()
+public void expireMasterSession()
  throws Exception
 Expire the Master's session
 
@@ -4562,7 +4562,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.htm
 
 
 expireRegionServerSession
-public void expireRegionServerSession(int index)
+public void expireRegionServerSession(int index)
 throws Exception
 Expire a region server's session
 
@@ -4579,7 +4579,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.htm
 
 
 expireSession
-public void expireSession(ZKWatcher nodeZK)
+public void expireSession(ZKWatcher nodeZK)
 throws Exception
 
 Throws:
@@ -4593,7 +4593,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.htm
 
 
 expireSession
-public void expireSession(ZKWatcher nodeZK,
+public void expireSession(ZKWatcher nodeZK,
    boolean checkStatus)
 throws Exception
 Expire a ZooKeeper session as recommended in ZooKeeper 
documentation
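The "recommended" technique referenced here is ZooKeeper's documented session-stealing trick: connect a second client with the victim's session id and password, then close it, which expires the session server-side. A hedged sketch against the raw ZooKeeper client (helper name is ours):

    import org.apache.zookeeper.WatchedEvent;
    import org.apache.zookeeper.Watcher;
    import org.apache.zookeeper.ZooKeeper;

    final class Sessions {
      // After this returns, the victim's watcher receives an Expired event.
      static void expire(String quorum, ZooKeeper victim) throws Exception {
        ZooKeeper thief = new ZooKeeper(quorum, victim.getSessionTimeout(),
            new Watcher() {
              @Override public void process(WatchedEvent event) { /* no-op */ }
            },
            victim.getSessionId(), victim.getSessionPasswd());
        thief.close(); // closing the shared session expires it for the victim
      }
    }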
@@ -4617,7 +4617,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.htm
 
 
 getHBaseCluster
-public MiniHBaseCluster getHBaseCluster()
+public MiniHBaseCluster getHBaseCluster()
 Get the Mini HBase cluster.
 
 Returns:
@@ -4633,7 +4633,7 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.htm
 
 
 getHBaseClusterInterface
-public HBaseCluster getHBaseClusterInterface()

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a616706/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.html
index d309d87..1bddf29 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/ConnectionUtils.html
@@ -135,368 +135,369 @@
 127   * localhost if the invocation target 
is 'this' server; save on network and protobuf
 128   * invocations.
 129   */
-130  @VisibleForTesting // Class is visible 
so can assert we are short-circuiting when expected.
-131  public static class 
ShortCircuitingClusterConnection extends ConnectionImplementation {
-132private final ServerName 
serverName;
-133private final 
AdminService.BlockingInterface localHostAdmin;
-134private final 
ClientService.BlockingInterface localHostClient;
-135
-136private 
ShortCircuitingClusterConnection(Configuration conf, ExecutorService pool, User 
user,
-137ServerName serverName, 
AdminService.BlockingInterface admin,
-138ClientService.BlockingInterface 
client)
-139throws IOException {
-140  super(conf, pool, user);
-141  this.serverName = serverName;
-142  this.localHostAdmin = admin;
-143  this.localHostClient = client;
-144}
-145
-146@Override
-147public AdminService.BlockingInterface 
getAdmin(ServerName sn) throws IOException {
-148  return serverName.equals(sn) ? 
this.localHostAdmin : super.getAdmin(sn);
-149}
-150
-151@Override
-152public 
ClientService.BlockingInterface getClient(ServerName sn) throws IOException {
-153  return serverName.equals(sn) ? 
this.localHostClient : super.getClient(sn);
-154}
-155
-156@Override
-157public MasterKeepAliveConnection 
getKeepAliveMasterService() throws MasterNotRunningException {
-158  if (this.localHostClient instanceof 
MasterService.BlockingInterface) {
-159return new 
ShortCircuitMasterConnection((MasterService.BlockingInterface)this.localHostClient);
-160  }
-161  return 
super.getKeepAliveMasterService();
-162}
-163  }
-164
-165  /**
-166   * Creates a short-circuit connection 
that can bypass the RPC layer (serialization,
-167   * deserialization, networking, etc..) 
when talking to a local server.
-168   * @param conf the current 
configuration
-169   * @param pool the thread pool to use 
for batch operations
-170   * @param user the user the connection 
is for
-171   * @param serverName the local server 
name
-172   * @param admin the admin interface of 
the local server
-173   * @param client the client interface 
of the local server
-174   * @return an short-circuit 
connection.
-175   * @throws IOException if IO failure 
occurred
-176   */
-177  public static ClusterConnection 
createShortCircuitConnection(final Configuration conf,
-178  ExecutorService pool, User user, 
final ServerName serverName,
-179  final 
AdminService.BlockingInterface admin, final ClientService.BlockingInterface 
client)
-180  throws IOException {
-181if (user == null) {
-182  user = 
UserProvider.instantiate(conf).getCurrent();
-183}
-184return new 
ShortCircuitingClusterConnection(conf, pool, user, serverName, admin, 
client);
-185  }
-186
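Given the factory above, a server-side caller wires its own in-process stubs in, so RPCs addressed to itself skip serialization and the network entirely. A hedged usage sketch (variable names ours; grounded in the signature shown):

    // Inside a server that already holds local service implementations:
    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(
        conf,            // current Configuration
        pool,            // ExecutorService for batch operations
        null,            // user; null falls back to UserProvider.instantiate(conf).getCurrent()
        thisServerName,  // ServerName of this process
        localAdmin,      // AdminService.BlockingInterface served in-process
        localClient);    // ClientService.BlockingInterface served in-process
    // conn.getClient(thisServerName) now returns localClient directly;
    // any other ServerName falls through to the normal RPC path.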
-187  /**
-188   * Setup the connection class, so that 
it will not depend on master being online. Used for testing
-189   * @param conf configuration to set
-190   */
-191  @VisibleForTesting
-192  public static void 
setupMasterlessConnection(Configuration conf) {
-193
conf.set(ClusterConnection.HBASE_CLIENT_CONNECTION_IMPL, 
MasterlessConnection.class.getName());
-194  }
-195
-196  /**
-197   * Some tests shut down the master. But 
table availability is a master RPC which is performed on
-198   * region re-lookups.
-199   */
-200  static class MasterlessConnection 
extends ConnectionImplementation {
-201MasterlessConnection(Configuration 
conf, ExecutorService pool, User user) throws IOException {
-202  super(conf, pool, user);
-203}
-204
-205@Override
-206public boolean 
isTableDisabled(TableName tableName) throws IOException {
-207  // treat all tables as enabled
-208  return false;
-209}
-210  }
-211
-212  /**
-213   * Return retries + 1. The returned value will be in range [1, Integer.MAX_VALUE].
-214   */
-215  static int retries2Attempts(int 
retries) {
-216return Math.max(1, retries == 
Integer.MAX_VALUE ? Integer.MAX_VALUE : retries + 1);
-217  }
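The max/ternary combination above guards against integer overflow: Integer.MAX_VALUE + 1 would wrap negative, so that case is passed through unchanged. Quick worked examples:

    retries2Attempts(0)                  // -> 1, zero retries still means one attempt
    retries2Attempts(3)                  // -> 4
    retries2Attempts(Integer.MAX_VALUE)  // -> Integer.MAX_VALUE, no overflow
    retries2Attempts(-5)                 // -> 1, clamped by Math.max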
-218
-219  /**
-220   * Get a unique key for the rpc stub to 
the given server.
-221   */
-222  static String getStubKey(String 
serviceName, ServerName serverName, boolean hostnameCanChange) {
-223// Sometimes, servers go down and 
they come back up with the same hostname but a 

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/48aaec11/devapidocs/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.html 
b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.html
deleted file mode 100644
index 4a5c3df..000
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.html
+++ /dev/null
@@ -1,1141 +0,0 @@
-org.apache.hadoop.hbase.zookeeper
-Class ZooKeeperWatcher
-
-
-
-java.lang.Object
-
-
-org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher
-
-
-
-
-
-
-
-All Implemented Interfaces:
-Closeable, AutoCloseable, Abortable, org.apache.zookeeper.Watcher
-
-
-
-@InterfaceAudience.Private
-public class ZooKeeperWatcher
-extends Object
-implements org.apache.zookeeper.Watcher, Abortable, Closeable
-Acts as the single ZooKeeper Watcher.  One instance of this 
is instantiated
- for each Master, RegionServer, and client process.
-
- This is the only class that implements Watcher.  Other 
internal
- classes which need to be notified of ZooKeeper events must register with
- the local instance of this watcher via registerListener(org.apache.hadoop.hbase.zookeeper.ZooKeeperListener).
-
- This class also holds and manages the connection to ZooKeeper.  Code to
- deal with connection related events and exceptions are handled here.
-
-
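The registration pattern described above looks roughly like this from a consumer's side; a hedged sketch assuming ZooKeeperListener's callback names (nodeCreated, nodeDataChanged, ...) from the same package:

    // Hypothetical listener; the single process-wide watcher fans events out.
    class MyZNodeListener extends ZooKeeperListener {
      MyZNodeListener(ZooKeeperWatcher watcher) { super(watcher); }

      @Override public void nodeCreated(String path) {
        // react to a znode appearing
      }

      @Override public void nodeDataChanged(String path) {
        // react to data changes on a watched znode
      }
    }

    watcher.registerListener(new MyZNodeListener(watcher));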
-
-
-
-
-
-
-
-
-
-Nested Class Summary
-
-
-
-
-Nested classes/interfaces inherited from 
interfaceorg.apache.zookeeper.Watcher
-org.apache.zookeeper.Watcher.Event
-
-
-
-
-
-
-
-
-Field Summary
-
-Fields
-
-Modifier and Type
-Field and Description
-
-
-protected Abortable
-abortable
-
-
-private boolean
-aborted
-
-
-private org.apache.hadoop.conf.Configuration
-conf
-
-
-private String
-identifier
-
-
-private List<ZooKeeperListener>
-listeners
-
-
-private static org.apache.commons.logging.Log
-LOG
-
-
-private static Pattern
-NAME_PATTERN
-
-
-private String
-prefix
-
-
-private String
-quorum
-
-
-private RecoverableZooKeeper
-recoverableZooKeeper
-
-
-CountDownLatch
-saslLatch
-
-
-ZNodePaths
-znodePaths
-
-
-
-
-
-
-
-
-
-Constructor Summary
-

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-17 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e60b829c/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
index b5293d0..02e4554 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.MergeTableRegionsFuture.html
@@ -38,11 +38,11 @@
 030import java.util.Collection;
 031import java.util.EnumSet;
 032import java.util.HashMap;
-033import java.util.LinkedList;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Set;
-037import java.util.TreeMap;
+033import java.util.Iterator;
+034import java.util.LinkedList;
+035import java.util.List;
+036import java.util.Map;
+037import java.util.Set;
 038import java.util.concurrent.Callable;
 039import 
java.util.concurrent.ExecutionException;
 040import java.util.concurrent.Future;
@@ -233,3930 +233,3945 @@
 225public class HBaseAdmin implements Admin 
{
 226  private static final Log LOG = 
LogFactory.getLog(HBaseAdmin.class);
 227
-228  private static final String 
ZK_IDENTIFIER_PREFIX =  "hbase-admin-on-";
+228  private ClusterConnection connection;
 229
-230  private ClusterConnection connection;
-231
-232  private volatile Configuration conf;
-233  private final long pause;
-234  private final int numRetries;
-235  private final int syncWaitTimeout;
-236  private boolean aborted;
-237  private int operationTimeout;
-238  private int rpcTimeout;
-239
-240  private RpcRetryingCallerFactory 
rpcCallerFactory;
-241  private RpcControllerFactory 
rpcControllerFactory;
+230  private volatile Configuration conf;
+231  private final long pause;
+232  private final int numRetries;
+233  private final int syncWaitTimeout;
+234  private boolean aborted;
+235  private int operationTimeout;
+236  private int rpcTimeout;
+237
+238  private RpcRetryingCallerFactory 
rpcCallerFactory;
+239  private RpcControllerFactory 
rpcControllerFactory;
+240
+241  private NonceGenerator ng;
 242
-243  private NonceGenerator ng;
-244
-245  @Override
-246  public int getOperationTimeout() {
-247return operationTimeout;
-248  }
-249
-250  HBaseAdmin(ClusterConnection 
connection) throws IOException {
-251this.conf = 
connection.getConfiguration();
-252this.connection = connection;
-253
-254// TODO: receive 
ConnectionConfiguration here rather than re-parsing these configs every time.
-255this.pause = 
this.conf.getLong(HConstants.HBASE_CLIENT_PAUSE,
-256
HConstants.DEFAULT_HBASE_CLIENT_PAUSE);
-257this.numRetries = 
this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
-258
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
-259this.operationTimeout = 
this.conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,
-260
HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
-261this.rpcTimeout = 
this.conf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY,
-262
HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
-263this.syncWaitTimeout = this.conf.getInt(
-264  "hbase.client.sync.wait.timeout.msec", 10 * 60000); // 10min
-265
-266this.rpcCallerFactory = 
connection.getRpcRetryingCallerFactory();
-267this.rpcControllerFactory = 
connection.getRpcControllerFactory();
-268
-269this.ng = 
this.connection.getNonceGenerator();
-270  }
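Every knob the constructor reads comes from Configuration with an HConstants default, so clients tune the admin entirely through config before the connection is created. A hedged example (values illustrative):

    Configuration conf = HBaseConfiguration.create();
    conf.setLong(HConstants.HBASE_CLIENT_PAUSE, 200);               // ms between retries
    conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 10);        // retry budget
    conf.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 120000); // whole-operation cap, ms
    conf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 30000);           // per-RPC cap, ms
    conf.setInt("hbase.client.sync.wait.timeout.msec", 600000);     // sync-future wait, 10 min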
-271
-272  @Override
-273  public void abort(String why, Throwable 
e) {
-274// Currently does nothing but throw 
the passed message and exception
-275this.aborted = true;
-276throw new RuntimeException(why, e);
-277  }
-278
-279  @Override
-280  public boolean isAborted() {
-281return this.aborted;
-282  }
-283
-284  @Override
-285  public boolean abortProcedure(final 
long procId, final boolean mayInterruptIfRunning)
-286  throws IOException {
-287return 
get(abortProcedureAsync(procId, mayInterruptIfRunning), this.syncWaitTimeout,
-288  TimeUnit.MILLISECONDS);
-289  }
-290
-291  @Override
-292  public Future<Boolean> abortProcedureAsync(final long procId, final boolean mayInterruptIfRunning)
-293  throws IOException {
-294Boolean abortProcResponse =
-295executeCallable(new 
MasterCallable<AbortProcedureResponse>(getConnection(),
-296getRpcControllerFactory()) 
{
-297  @Override
-298  protected AbortProcedureResponse 
rpcCall() throws Exception {
-299AbortProcedureRequest 
abortProcRequest =
-300
AbortProcedureRequest.newBuilder().setProcId(procId).build();
-301return 
master.abortProcedure(getRpcController(), abortProcRequest);
-302  }
-303}).getIsProcedureAborted();
-304return new AbortProcedureFuture(this, 
procId, abortProcResponse);
-305  }
-306
-307  @Override

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/67a6e2ec/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.LocateRequest.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.LocateRequest.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.LocateRequest.html
index e67bd67..5e6f8d5 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.LocateRequest.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.LocateRequest.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static final class AsyncNonMetaRegionLocator.LocateRequest
+private static final class AsyncNonMetaRegionLocator.LocateRequest
 extends Object
 
 
@@ -210,7 +210,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 row
-public final byte[] row
+public final byte[] row
 
 
 
@@ -219,7 +219,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 locateType
-public final RegionLocateType locateType
+public final RegionLocateType locateType
 
 
 
@@ -236,7 +236,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 LocateRequest
-public LocateRequest(byte[] row,
+public LocateRequest(byte[] row,
  RegionLocateType locateType)
 
 
@@ -254,7 +254,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 hashCode
-public int hashCode()
+public int hashCode()
 
 Overrides:
hashCode in class Object
@@ -267,7 +267,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 equals
-public boolean equals(Object obj)
+public boolean equals(Object obj)
 
 Overrides:
equals in class Object

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/67a6e2ec/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.TableCache.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.TableCache.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.TableCache.html
index 5fff896..c6385a2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.TableCache.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.TableCache.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static final class AsyncNonMetaRegionLocator.TableCache
+private static final class AsyncNonMetaRegionLocator.TableCache
 extends Object
 
 
@@ -233,7 +233,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 cache
-public final ConcurrentNavigableMap<byte[], HRegionLocation> cache
+public final ConcurrentNavigableMap<byte[], HRegionLocation> cache
 
 
 
@@ -242,7 +242,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 pendingRequests
-public final Set<AsyncNonMetaRegionLocator.LocateRequest> pendingRequests
+public final Set<AsyncNonMetaRegionLocator.LocateRequest> pendingRequests
 
 
 
@@ -251,7 +251,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 allRequests
-public finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in 

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cba900e4/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 51f1a63..2cfe6f1 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -151,111 +151,111 @@
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterCell(Cellcell)
+ValueFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterCell(Cellc)
+SkipFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-RowFilter.filterCell(Cellv)
+FilterListBase.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterCell(Cellc)
+FamilyFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-Filter.filterCell(Cellc)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+ColumnPrefixFilter.filterCell(Cellcell)
 
 
 Filter.ReturnCode
-RandomRowFilter.filterCell(Cellc)
+PageFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterCell(Cellc)
+RowFilter.filterCell(Cellv)
 
 
 Filter.ReturnCode
-SkipFilter.filterCell(Cellc)
+ColumnRangeFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-TimestampsFilter.filterCell(Cellc)
+ColumnCountGetFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-ValueFilter.filterCell(Cellc)
+MultipleColumnPrefixFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterCell(Cellignored)
+ColumnPaginationFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FamilyFilter.filterCell(Cellc)
+DependentColumnFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-QualifierFilter.filterCell(Cellc)
+InclusiveStopFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FilterList.filterCell(Cellc)
+KeyOnlyFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterCell(Cellc)
+MultiRowRangeFilter.filterCell(Cellignored)
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterCell(Cellc)
+Filter.filterCell(Cellc)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterCell(Cellc)
+FirstKeyOnlyFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterCell(Cellignored)
+WhileMatchFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-PrefixFilter.filterCell(Cellc)
+FirstKeyValueMatchingQualifiersFilter.filterCell(Cellc)
+Deprecated.
+
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterCell(Cellc)
+TimestampsFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterCell(Cellc)
-Deprecated.
-
+FuzzyRowFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-PageFilter.filterCell(Cellignored)
+FilterList.filterCell(Cellc)
 
 
 Filter.ReturnCode
-FilterListBase.filterCell(Cellc)
+RandomRowFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterCell(Cellc)
+PrefixFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterCell(Cellc)
+SingleColumnValueFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterCell(Cellc)
+QualifierFilter.filterCell(Cellc)
 
 
 Filter.ReturnCode
@@ -271,158 +271,158 @@
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cellc)
+ValueFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cellc)
+SkipFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-RowFilter.filterKeyValue(Cellc)
-Deprecated.
-
+FilterListBase.filterKeyValue(Cellc)
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cellc)
+FamilyFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-Filter.filterKeyValue(Cellc)
-Deprecated.
-As of release 2.0.0, this 
will be removed in HBase 3.0.0.
- Instead use filterCell(Cell)
-
+ColumnPrefixFilter.filterKeyValue(Cellc)
+Deprecated.
 
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cellc)
+PageFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cellc)
+RowFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cellc)
+ColumnRangeFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cellc)
+ColumnCountGetFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cellc)
+MultipleColumnPrefixFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cellignored)
+ColumnPaginationFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cellc)
+DependentColumnFilter.filterKeyValue(Cellc)
 Deprecated.
 
 
 
 Filter.ReturnCode
-QualifierFilter.filterKeyValue(Cellc)
+InclusiveStopFilter.filterKeyValue(Cellc)
 

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6607d33c/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
index d9fb34d..4e9ad44 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
@@ -283,27 +283,27 @@ service.
 
 
 private Scan
-ScannerCallableWithReplicas.scan
+AsyncScanSingleRegionRpcRetryingCaller.scan
 
 
 protected Scan
-ClientScanner.scan
+ScannerCallable.scan
 
 
 private Scan
-AsyncClientScanner.scan
+ScannerCallableWithReplicas.scan
 
 
-private Scan
-AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.scan
+protected Scan
+ClientScanner.scan
 
 
 private Scan
-AsyncScanSingleRegionRpcRetryingCaller.scan
+AsyncClientScanner.scan
 
 
-protected Scan
-ScannerCallable.scan
+private Scan
+AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.scan
 
 
 private Scan
@@ -339,11 +339,11 @@ service.
 
 
 protected Scan
-ClientScanner.getScan()
+ScannerCallable.getScan()
 
 
 protected Scan
-ScannerCallable.getScan()
+ClientScanner.getScan()
 
 
 Scan
@@ -638,8 +638,8 @@ service.
 
 
 ResultScanner
-AsyncTable.getScanner(Scanscan)
-Returns a scanner on the current table as specified by the 
Scan 
object.
+HTable.getScanner(Scanscan)
+The underlying HTable must 
not be closed.
 
 
 
@@ -655,8 +655,8 @@ service.
 
 
 ResultScanner
-HTable.getScanner(Scanscan)
-The underlying HTable must 
not be closed.
+AsyncTable.getScanner(Scanscan)
+Returns a scanner on the current table as specified by the 
Scan 
object.
 
 
 
@@ -689,16 +689,16 @@ service.
 
 
 void
+AsyncTableImpl.scan(Scanscan,
+ScanResultConsumerconsumer)
+
+
+void
 AsyncTable.scan(Scanscan,
 ScanResultConsumerconsumer)
 The scan API uses the observer pattern.
 
 
-
-void
-AsyncTableImpl.scan(Scanscan,
-ScanResultConsumerconsumer)
-
 
 private void
 AsyncTableImpl.scan0(Scanscan,
@@ -706,11 +706,11 @@ service.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListResult
-AsyncTableImpl.scanAll(Scanscan)
+RawAsyncTableImpl.scanAll(Scanscan)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListResult
-RawAsyncTableImpl.scanAll(Scanscan)
+AsyncTableImpl.scanAll(Scanscan)
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListResult
@@ -1311,17 +1311,17 @@ service.
 
 
 private Scan
-TableInputFormatBase.scan
-Holds the details for the internal scanner.
-
+TableSnapshotInputFormatImpl.RecordReader.scan
 
 
 private Scan
-TableSnapshotInputFormatImpl.RecordReader.scan
+TableRecordReaderImpl.scan
 
 
 private Scan
-TableRecordReaderImpl.scan
+TableInputFormatBase.scan
+Holds the details for the internal scanner.
+
 
 
 
@@ -1371,14 +1371,14 @@ service.
 
 
 Scan
-TableInputFormatBase.getScan()
-Gets the scan defining the actual details like columns 
etc.
+TableSplit.getScan()
+Returns a Scan object from the stored string 
representation.
 
 
 
 Scan
-TableSplit.getScan()
-Returns a Scan object from the stored string 
representation.
+TableInputFormatBase.getScan()
+Gets the scan defining the actual details like columns 
etc.
 
 
 
@@ -1624,13 +1624,13 @@ service.
 
 
 void
-TableInputFormatBase.setScan(Scanscan)
+TableRecordReaderImpl.setScan(Scanscan)
 Sets the scan defining the actual details like columns 
etc.
 
 
 
 void
-TableRecordReaderImpl.setScan(Scanscan)
+TableInputFormatBase.setScan(Scanscan)
 Sets the scan defining the actual details like columns 
etc.
 
 
@@ -1697,12 +1697,6 @@ service.
 
 
 
-static void
-MultiTableSnapshotInputFormat.setInput(org.apache.hadoop.conf.Configurationconfiguration,
-http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionScansnapshotScans,
-org.apache.hadoop.fs.PathtmpRestoreDir)
-
-
 void
 

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
index a89df18..ea0bc8c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
@@ -49,26 +49,26 @@
 041import 
org.apache.hadoop.hbase.ByteBufferKeyValue;
 042import 
org.apache.hadoop.hbase.SizeCachedKeyValue;
 043import 
org.apache.hadoop.hbase.SizeCachedNoTagsKeyValue;
-044import 
org.apache.yetus.audience.InterfaceAudience;
-045import 
org.apache.hadoop.hbase.fs.HFileSystem;
-046import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-047import 
org.apache.hadoop.hbase.io.compress.Compression;
-048import 
org.apache.hadoop.hbase.io.crypto.Cipher;
-049import 
org.apache.hadoop.hbase.io.crypto.Encryption;
-050import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
-051import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-052import 
org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
-053import 
org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
-054import 
org.apache.hadoop.hbase.nio.ByteBuff;
-055import 
org.apache.hadoop.hbase.regionserver.KeyValueScanner;
-056import 
org.apache.hadoop.hbase.security.EncryptionUtil;
-057import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-058import 
org.apache.hadoop.hbase.util.Bytes;
-059import 
org.apache.hadoop.hbase.util.IdLock;
-060import 
org.apache.hadoop.hbase.util.ObjectIntPair;
-061import 
org.apache.hadoop.io.WritableUtils;
-062import org.apache.htrace.Trace;
-063import org.apache.htrace.TraceScope;
+044import 
org.apache.hadoop.hbase.trace.TraceUtil;
+045import 
org.apache.yetus.audience.InterfaceAudience;
+046import 
org.apache.hadoop.hbase.fs.HFileSystem;
+047import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
+048import 
org.apache.hadoop.hbase.io.compress.Compression;
+049import 
org.apache.hadoop.hbase.io.crypto.Cipher;
+050import 
org.apache.hadoop.hbase.io.crypto.Encryption;
+051import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
+052import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+053import 
org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
+054import 
org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
+055import 
org.apache.hadoop.hbase.nio.ByteBuff;
+056import 
org.apache.hadoop.hbase.regionserver.KeyValueScanner;
+057import 
org.apache.hadoop.hbase.security.EncryptionUtil;
+058import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+059import 
org.apache.hadoop.hbase.util.Bytes;
+060import 
org.apache.hadoop.hbase.util.IdLock;
+061import 
org.apache.hadoop.hbase.util.ObjectIntPair;
+062import 
org.apache.hadoop.io.WritableUtils;
+063import 
org.apache.htrace.core.TraceScope;
 064
 065import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 066
@@ -263,1235 +263,1235 @@
 255// Prefetch file blocks upon open if 
requested
 256if (cacheConf.shouldPrefetchOnOpen()) 
{
 257  PrefetchExecutor.request(path, new 
Runnable() {
-258public void run() {
-259  long offset = 0;
-260  long end = 0;
-261  try {
-262end = 
getTrailer().getLoadOnOpenDataOffset();
-263if (LOG.isTraceEnabled()) {
-264  LOG.trace("Prefetch start " 
+ getPathOffsetEndStr(path, offset, end));
-265}
-266// TODO: Could we use block 
iterator in here? Would that get stuff into the cache?
-267HFileBlock prevBlock = 
null;
-268while (offset  end) {
-269  if (Thread.interrupted()) 
{
-270break;
-271  }
-272  // Perhaps we got our block 
from cache? Unlikely as this may be, if it happens, then
-273  // the 
internal-to-hfileblock thread local which holds the overread that gets the
-274  // next header, will not 
have happened...so, pass in the onDiskSize gotten from the
-275  // cached block. This 
'optimization' triggers extremely rarely I'd say.
-276  long onDiskSize = prevBlock 
!= null? prevBlock.getNextBlockOnDiskSize(): -1;
-277  HFileBlock block = 
readBlock(offset, onDiskSize, true, false, false, false,
-278  null, null);
-279  // Need not update the 
current block. Ideally here the readBlock won't find the
-280  // block in cache. We call 
this readBlock so that block data is read from FS and
-281  // cached in BC. So there 
is no reference count increment that happens here.
-282  // The return will ideally 

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/809180c4/devapidocs/src-html/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.html
index 0ac7e72..4186401 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.html
@@ -35,146 +35,147 @@
 027import 
org.apache.hadoop.hbase.client.Connection;
 028import 
org.apache.hadoop.hbase.util.Bytes;
 029import 
org.apache.hadoop.hbase.zookeeper.ZKUtil;
-030import 
org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-031import 
org.apache.zookeeper.KeeperException;
-032
-033/**
-034 * Client-side manager for which table's 
hfiles should be preserved for long-term archive.
-035 * @see ZKTableArchiveClient
-036 * @see HFileArchiveTableMonitor
-037 * @see LongTermArchivingHFileCleaner
-038 */
-039@InterfaceAudience.Private
-040class HFileArchiveManager {
-041
-042  private final String archiveZnode;
-043  private static final Log LOG = 
LogFactory.getLog(HFileArchiveManager.class);
-044  private final ZooKeeperWatcher 
zooKeeper;
-045  private volatile boolean stopped = 
false;
-046
-047  public HFileArchiveManager(Connection 
connection, Configuration conf)
-048  throws 
ZooKeeperConnectionException, IOException {
-049this.zooKeeper = new 
ZooKeeperWatcher(conf, "hfileArchiveManager-on-" + connection.toString(),
-050connection);
-051this.archiveZnode = 
ZKTableArchiveClient.getArchiveZNode(this.zooKeeper.getConfiguration(),
-052  this.zooKeeper);
-053  }
-054
-055  /**
-056   * Turn on auto-backups of HFiles on 
the specified table.
-057   * <p>
-058   * When HFiles would be deleted from 
the hfile archive, they are instead preserved.
-059   * @param table name of the table for 
which to preserve hfiles.
-060   * @return <tt>this</tt> for chaining.
-061   * @throws KeeperException if we can't 
reach zookeeper to update the hfile cleaner.
-062   */
-063  public HFileArchiveManager 
enableHFileBackup(byte[] table) throws KeeperException {
-064enable(this.zooKeeper, table);
-065return this;
-066  }
-067
-068  /**
-069   * Stop retaining HFiles for the given 
table in the archive. HFiles will be cleaned up on the next
-070   * pass of the {@link 
org.apache.hadoop.hbase.master.cleaner.HFileCleaner}, if the HFiles are 
retained by another
-071   * cleaner.
-072   * @param table name of the table for 
which to disable hfile retention.
-073   * @return ttthis/tt 
for chaining.
-074   * @throws KeeperException if if we 
can't reach zookeeper to update the hfile cleaner.
-075   */
-076  public HFileArchiveManager 
disableHFileBackup(byte[] table) throws KeeperException {
-077  disable(this.zooKeeper, table);
-078return this;
-079  }
-080
-081  /**
-082   * Disable long-term archival of all 
hfiles for all tables in the cluster.
-083   * @return ttthis/tt 
for chaining.
-084   * @throws IOException if the number of 
attempts is exceeded
-085   */
-086  public HFileArchiveManager 
disableHFileBackup() throws IOException {
-087LOG.debug("Disabling backups on all 
tables.");
-088try {
-089  
ZKUtil.deleteNodeRecursively(this.zooKeeper, archiveZnode);
-090  return this;
-091} catch (KeeperException e) {
-092  throw new IOException("Unexpected 
ZK exception!", e);
-093}
-094  }
-095
-096  /**
-097   * Perform a best effort enable of 
hfile retention, which relies on zookeeper communicating the //
-098   * * change back to the hfile 
cleaner.
-099   * p
-100   * No attempt is made to make sure that 
backups are successfully created - it is inherently an
-101   * basynchronous 
operation/b.
-102   * @param zooKeeper watcher connection 
to zk cluster
-103   * @param table table name on which to 
enable archiving
-104   * @throws KeeperException
-105   */
-106  private void enable(ZooKeeperWatcher 
zooKeeper, byte[] table)
-107  throws KeeperException {
-108LOG.debug("Ensuring archiving znode 
exists");
-109ZKUtil.createAndFailSilent(zooKeeper, 
archiveZnode);
-110
-111// then add the table to the list of 
znodes to archive
-112String tableNode = 
this.getTableNode(table);
-113LOG.debug("Creating: " + tableNode + 
", data: []");
-114ZKUtil.createSetData(zooKeeper, 
tableNode, new byte[0]);
-115  }
-116
-117  /**
-118   * Disable all archiving of files for a 
given table
-119   * p
-120   * Inherently an basynchronous 
operation/b.
-121   * @param zooKeeper watcher for the ZK 
cluster
-122   * @param table name of the table to 
disable
-123   * @throws KeeperException if an 
unexpected ZK connection issues occurs
-124   */
-125  private void disable(ZooKeeperWatcher 
zooKeeper, byte[] table) throws 

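The enable/disable protocol above is plain znode bookkeeping: a parent archive znode created fail-silently, plus one empty child per table. A hedged sketch of the same flow against the raw ZooKeeper client; the "/hbase/archive" parent path and plain-string table names are assumptions for illustration (HBase itself goes through ZKUtil and ZooKeeperWatcher):

import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;

// Hedged sketch of the znode protocol above, not HBase's actual implementation.
final class ArchiveZnodesSketch {
  private static final String ARCHIVE_ZNODE = "/hbase/archive"; // assumed path
  private final ZooKeeper zk;

  ArchiveZnodesSketch(ZooKeeper zk) {
    this.zk = zk;
  }

  // Mirrors enable(): ensure the parent exists ("create and fail silent"),
  // then register the table as a child znode with empty data.
  void enable(String table) throws KeeperException, InterruptedException {
    createIfMissing(ARCHIVE_ZNODE);
    createIfMissing(ARCHIVE_ZNODE + "/" + table);
  }

  // Mirrors disable(table): removing the child is what the cleaner reacts to.
  void disable(String table) throws KeeperException, InterruptedException {
    try {
      zk.delete(ARCHIVE_ZNODE + "/" + table, -1); // -1 matches any version
    } catch (KeeperException.NoNodeException e) {
      // already disabled; nothing to do
    }
  }

  private void createIfMissing(String path) throws KeeperException, InterruptedException {
    try {
      zk.create(path, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
    } catch (KeeperException.NodeExistsException e) {
      // another client won the race; that is fine
    }
  }
}

As the javadoc stresses, both operations are asynchronous: the cleaner only observes the change when its ZooKeeper watch fires, so success of the backup itself is never confirmed here.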
[12/51] [partial] hbase-site git commit: Published site at .

2017-11-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a108018f/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
index 5c5e0eb..7b41db2 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
@@ -2960,5416 +2960,5447 @@
 2952protected final 
ObservedExceptionsInBatch observedExceptions;
 2953//Durability of the batch (highest 
durability of all operations)
 2954protected Durability durability;
-2955
-2956    public BatchOperation(final HRegion region, T[] operations) {
-2957      this.operations = operations;
-2958      this.retCodeDetails = new OperationStatus[operations.length];
-2959      Arrays.fill(this.retCodeDetails, OperationStatus.NOT_RUN);
-2960      this.walEditsFromCoprocessors = new WALEdit[operations.length];
-2961      familyCellMaps = new Map[operations.length];
-2962
-2963      this.region = region;
-2964      observedExceptions = new ObservedExceptionsInBatch();
-2965      durability = Durability.USE_DEFAULT;
-2966    }
-2967
-2968    /**
-2969     * Visitor interface for batch operations
-2970     */
-2971    @FunctionalInterface
-2972    public interface Visitor {
-2973      /**
-2974       * @param index operation index
-2975       * @return If true continue visiting remaining entries, break otherwise
-2976       */
-2977      boolean visit(int index) throws IOException;
-2978    }
-2979
-2980    /**
-2981     * Helper method for visiting pending/ all batch operations
-2982     */
-2983    public void visitBatchOperations(boolean pendingOnly, int lastIndexExclusive, Visitor visitor)
-2984        throws IOException {
-2985      assert lastIndexExclusive <= this.size();
-2986      for (int i = nextIndexToProcess; i < lastIndexExclusive; i++) {
-2987        if (!pendingOnly || isOperationPending(i)) {
-2988          if (!visitor.visit(i)) {
-2989            break;
-2990          }
-2991        }
-2992      }
-2993    }
-2994
-2995    public abstract Mutation getMutation(int index);
-2996    public abstract long getNonceGroup(int index);
-2997    public abstract long getNonce(int index);
-2998    /** This method is potentially expensive and useful mostly for non-replay CP path. */
-2999    public abstract Mutation[] getMutationsForCoprocs();
-3000    public abstract boolean isInReplay();
-3001    public abstract long getOrigLogSeqNum();
-3002    public abstract void startRegionOperation() throws IOException;
-3003    public abstract void closeRegionOperation() throws IOException;
-3004
-3005    /**
-3006     * Validates each mutation and prepares a batch for write. If necessary (non-replay case), runs
-3007     * CP prePut()/ preDelete() hooks for all mutations in a batch. This is intended to operate on
-3008     * the entire batch and will be called from outside of the class to check and prepare the batch.
-3009     * This can be implemented by calling the helper method {@link #checkAndPrepareMutation(int, long)}
-3010     * in a 'for' loop over mutations.
-3011     */
-3012    public abstract void checkAndPrepare() throws IOException;
-3013
-3014    /**
-3015     * Implement any Put request specific check and prepare logic here. Please refer to
-3016     * {@link #checkAndPrepareMutation(Mutation, long)} for how it's used.
-3017     */
-3018    protected abstract void checkAndPreparePut(final Put p) throws IOException;
-3019
-3020    /**
-3021     * If necessary, calls preBatchMutate() CP hook for a mini-batch and updates metrics, cell
-3022     * count, tags and timestamp for all cells of all operations in a mini-batch.
-3023     */
-3024    public abstract void prepareMiniBatchOperations(MiniBatchOperationInProgress<Mutation>
-3025        miniBatchOp, long timestamp, final List<RowLock> acquiredRowLocks) throws IOException;
-3026
-3027    /**
-3028     * Write mini-batch operations to MemStore
-3029     */
-3030    public abstract WriteEntry writeMiniBatchOperationsToMemStore(
-3031        final MiniBatchOperationInProgress<Mutation> miniBatchOp, final WriteEntry writeEntry)
-3032        throws IOException;
-3033
-3034    protected void writeMiniBatchOperationsToMemStore(
-3035        final MiniBatchOperationInProgress<Mutation> miniBatchOp, final long writeNumber)
-3036        throws IOException {
-3037      MemStoreSizing memStoreAccounting = new MemStoreSizing();
-3038      visitBatchOperations(true, miniBatchOp.getLastIndexExclusive(), (int index) -> {
-3039        // We need to update the sequence id for following reasons.
-3040        // 1) If the op is in replay mode, FSWALEntry#stampRegionSequenceId won't stamp sequence

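The Visitor interface above is the batch path's single extension point for walking operations, used both for pending-only passes and full passes. A self-contained sketch of the same shape, where a pending[] array stands in for isOperationPending(i):

import java.io.IOException;

// Hedged sketch of BatchOperation's visitor pattern; pending[] is an
// illustrative stand-in for the real per-operation status tracking.
final class BatchWalk {
  @FunctionalInterface
  interface Visitor {
    /** @return true to keep visiting, false to break out early */
    boolean visit(int index) throws IOException;
  }

  static void visitBatch(boolean[] pending, boolean pendingOnly, int lastIndexExclusive,
      Visitor visitor) throws IOException {
    for (int i = 0; i < lastIndexExclusive; i++) {
      if (!pendingOnly || pending[i]) {
        if (!visitor.visit(i)) {
          break; // visitor asked to stop early
        }
      }
    }
  }

  public static void main(String[] args) throws IOException {
    boolean[] pending = { true, false, true, true };
    // Visit only pending operations; keep going as long as the visitor returns true.
    visitBatch(pending, true, pending.length,
        index -> { System.out.println("processing op " + index); return true; });
  }
}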
[12/51] [partial] hbase-site git commit: Published site at .

2017-11-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2b3f2bee/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
index a3e1347..8bfd8c6 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
@@ -459,7 +459,7 @@ implements HRegionServer
-cacheConfig,
 cacheFlusher,
 clusterConnection,
 clusterStatusTracker,
 compactSplitThread,
 conf,
 configurationManager,
 csm,
 executorService, fs,
 fsOk,
 fsUtilizationChore,
 hMemManager,
 infoServer,
 initLatch,
 leases,
 lock,
 MASTER_HOSTNAME_KEY
 , metaTableLocator,
 movedRegions,
 msgInterval,
 numRegionsToReport,
 onlineRegions,
 recoveringRegions,
 regionFavoredNodesMap,
 REGIONSERVER,
 regionsInTransitionInRS,
 replicationSinkHandler,
 replicationSourceHandler,
 rpcServices,
 secureBulkLoadManager,
 serverName,
 sleeper,
 startcode,
 tableDescriptors, TEST_SKIP_REPORTING_TRANSITION,
 threadWakeFrequency,
 useThisHostnameInstead,
 walFactory,
 walFs,
 walRoller,
 zooKeeper
+cacheConfig,
 cacheFlusher,
 clusterConnection,
 clusterStatusTracker,
 compactSplitThread,
 conf,
 configurationManager,
 csm,
 executorService, fs,
 fsOk,
 fsUtilizationChore,
 hMemManager,
 infoServer,
 initLatch,
 leases,
 lock,
 MASTER_HOSTNAME_KEY
 , metaTableLocator,
 movedRegions,
 msgInterval,
 numRegionsToReport,
 onlineRegions,
 regionFavoredNodesMap,
 REGIONSERVER,
 regionsInTransitionInRS,
 replicationSinkHandler,
 replicationSourceHandler,
 rpcServices,
 secureBulkLoadManager,
 serverName,
 sleeper,
 startcode,
 tableDescriptors,
 TEST_SKIP_REPORTING_TRANSITION, threadWakeFrequency,
 useThisHostnameInstead,
 walFactory,
 walFs,
 walRoller,
 zooKeeper
 
 
 
@@ -1369,7 +1369,7 @@ implements HRegionServer
-abort,
 addRegion,
 addToMovedRegions,
 checkFileSystem,
 cleanMovedRegions,
 closeAllRegions,
 closeAndOfflineRegionForSplitOrMerge, closeRegion,
 constructRegionServer,
 convertThrowableToIOE,
 createClusterConnection,
 createRegionLoad,
 createRegionServerStatusStub,
 createRegionServerStatusStub,
 execRegionServerService,
 getCacheConfig,
 getChoreService,
 getClusterConnection,
 getClusterId,
 getCompactionPressure, getCompactionRequestor,
 getCompactSplitThread,
 getConfiguration,
 getConfigurationManager,
 getConnection,
 getCoordinatedStateManager,
 getEventLoopGroupConfig,
 getExecutorService, getFavoredNodesForRegion,
 getFileSystem,
 getFlushPressure,
 getFlushRequester,
 getFlushThroughputController,
 getFsTableDescriptors,
 getHeapMemoryManager,
 getInfoServer,
 getLastSequenceId,
 getLeases,
 getMasterAddressTracker,
 getMetaTableLocator,
 getMetrics,
 getMostLoadedRegions,
 getNonceManager,
 getNumberOfOnlineRegions,
 getOnlineRegion,
 getOnlineRegionsLocalContext,
 getOnlineTables,
 getRecoveringRegions,
 getRegion,
 getRegion,
 getRegionBlockLocations, getRegionByEncodedName,
 getRegionByEncodedName,
 getRegions,
 getRegions,
 getRegionServerAccounting,
 getRegionServerCoprocessorHost,
 getRegionServerCoprocessors, getRegionServerMetrics,
 getRegionServerRpcQuotaManager,
 getRegionServerSpaceQuotaManager,
 getRegionsInTransitionInRS,
 getReplicationSourceService,
 getRootDir,
 getRpcServer,
 getRSRpcServices,
 getSecureBulkLoadManager,
 getStartcode,
 getThreadWakeFrequency,
 getWAL,
 getWALFileSystem,
 getWalRoller,
 getWALRootDir,
 getWALs, handleReportForDutyResponse,
 initializeMemStoreChunkCreator,
 isAborted,
 isOnline,
 isStopped,
 isStopping,
 kill, movedRegionCleanerPeriod,
 onConfigurationChange,
 postOpenDeployTasks,
 regionLock,
 removeRegion,
 reportRegionSizesForQuotas, reportRegionStateTransition,
 sendShutdownInterrupt,
 setInitLatch,
 setupClusterConnection,
 shouldUseThisHostnameInstead,
 stop,
 stop,
 toString,
 tryRegionServerReport,
 unassign,
 updateConfiguration,
 updateRegionFavoredNodesMapping,
 waitForServerOnline,
 walRollRequestFinished
+abort,
 addRegion,
 addToMovedRegions,
 checkFileSystem,
 cleanMovedRegions,
 clearRegionBlockCache,
 closeAllRegions,
 closeAndOfflineRegionForSplitOrMerge,
 closeRegion,
 constructRegionServer,
 convertThrowableToIOE,
 createClusterConnection,

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2cef721c/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html
index aa5ad0d..30d80c0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html
@@ -32,619 +32,617 @@
 024import java.util.PriorityQueue;
 025
 026import org.apache.hadoop.hbase.Cell;
-027import 
org.apache.hadoop.hbase.CellComparatorImpl;
-028import 
org.apache.hadoop.hbase.CellUtil;
-029import 
org.apache.hadoop.hbase.PrivateCellUtil;
-030import 
org.apache.yetus.audience.InterfaceAudience;
-031import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-032import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
-033import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-034import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
-035import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair;
-036import 
org.apache.hadoop.hbase.util.Bytes;
-037import 
org.apache.hadoop.hbase.util.Pair;
-038import 
org.apache.hadoop.hbase.util.UnsafeAccess;
-039import 
org.apache.hadoop.hbase.util.UnsafeAvailChecker;
-040
-041import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-042
-043/**
-044 * This is an optimized version of a standard FuzzyRowFilter. Filters data based on fuzzy row key.
-045 * Performs fast-forwards during scanning. It takes pairs (row key, fuzzy info) to match row keys.
-046 * Where fuzzy info is a byte array with 0 or 1 as its values:
-047 * <ul>
-048 * <li>0 - means that this byte in provided row key is fixed, i.e. row key's byte at same position
-049 * must match</li>
-050 * <li>1 - means that this byte in provided row key is NOT fixed, i.e. row key's byte at this
-051 * position can be different from the one in provided row key</li>
-052 * </ul>
-053 * Example: Let's assume row key format is userId_actionId_year_month. Length of userId is fixed and
-054 * is 4, length of actionId is 2 and year and month are 4 and 2 bytes long respectively. Let's
-055 * assume that we need to fetch all users that performed certain action (encoded as "99") in Jan of
-056 * any year. Then the pair (row key, fuzzy info) would be the following: row key = "????_99_????_01"
-057 * (one can use any value instead of "?") fuzzy info =
-058 * "\x01\x01\x01\x01\x00\x00\x00\x00\x01\x01\x01\x01\x00\x00\x00" I.e. fuzzy info tells the matching
-059 * mask is "????_99_????_01", where at ? can be any value.
-060 */
-061@InterfaceAudience.Public
-062public class FuzzyRowFilter extends FilterBase {
-063  private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
-064  private List<Pair<byte[], byte[]>> fuzzyKeysData;
-065  private boolean done = false;
-066
-067  /**
-068   * The index of a last successfully found matching fuzzy string (in fuzzyKeysData). We will start
-069   * matching next KV with this one. If they do not match then we will return back to the one-by-one
-070   * iteration over fuzzyKeysData.
-071   */
-072  private int lastFoundIndex = -1;
-073
-074  /**
-075   * Row tracker (keeps all next rows after SEEK_NEXT_USING_HINT was returned)
-076   */
-077  private RowTracker tracker;
-078
-079  public FuzzyRowFilter(List<Pair<byte[], byte[]>> fuzzyKeysData) {
-080    List<Pair<byte[], byte[]>> fuzzyKeyDataCopy = new ArrayList<>(fuzzyKeysData.size());
-081
-082    for (Pair<byte[], byte[]> aFuzzyKeysData : fuzzyKeysData) {
-083      if (aFuzzyKeysData.getFirst().length != aFuzzyKeysData.getSecond().length) {
-084        Pair<String, String> readable =
-085          new Pair<>(Bytes.toStringBinary(aFuzzyKeysData.getFirst()),
-086              Bytes.toStringBinary(aFuzzyKeysData.getSecond()));
-087        throw new IllegalArgumentException("Fuzzy pair lengths do not match: " + readable);
-088      }
-089
-090      Pair<byte[], byte[]> p = new Pair<>();
-091      // create a copy of pair bytes so that they are not modified by the filter.
-092      p.setFirst(Arrays.copyOf(aFuzzyKeysData.getFirst(), aFuzzyKeysData.getFirst().length));
-093      p.setSecond(Arrays.copyOf(aFuzzyKeysData.getSecond(), aFuzzyKeysData.getSecond().length));
-094
-095      // update mask ( 0 -> -1 (0xff), 1 -> 2)
-096      p.setSecond(preprocessMask(p.getSecond()));
-097      preprocessSearchKey(p);
-098
-099      fuzzyKeyDataCopy.add(p);
-100    }
-101    this.fuzzyKeysData = fuzzyKeyDataCopy;
-102    this.tracker = new RowTracker();
-103  }
+027import 
org.apache.hadoop.hbase.CellComparator;
+028import 
org.apache.hadoop.hbase.PrivateCellUtil;
+029import 
org.apache.yetus.audience.InterfaceAudience;
+030import 

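Tying the javadoc example to the constructor shown above, a hedged usage sketch for the userId_actionId_year_month layout; it assumes single-byte characters so one mask byte maps to one row-key character:

import java.util.Arrays;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

// Hedged sketch: match any 4-byte userId and any 4-byte year, with actionId
// "99" and month "01" fixed. 1 = wildcard byte, 0 = fixed byte, as documented.
public class FuzzyScanExample {
  public static Scan actionInJanuary() {
    byte[] rowKey = Bytes.toBytes("????_99_????_01"); // '?' positions are ignored
    byte[] fuzzyInfo = {
      1, 1, 1, 1,   // userId: any
      0, 0, 0, 0,   // "_99_": fixed
      1, 1, 1, 1,   // year: any
      0, 0, 0       // "_01": fixed
    };
    FuzzyRowFilter filter = new FuzzyRowFilter(
        Arrays.asList(new Pair<>(rowKey, fuzzyInfo)));
    Scan scan = new Scan();
    scan.setFilter(filter);
    return scan;
  }
}

Note that the constructor copies both arrays defensively (lines -091 to -093 above), so the caller's buffers are never mutated by the mask preprocessing.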
[12/51] [partial] hbase-site git commit: Published site at .

2017-11-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/32453e2d/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
index 531081e..a22e5ce 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html
@@ -34,2832 +34,3011 @@
 026import java.util.Collections;
 027import java.util.EnumSet;
 028import java.util.HashMap;
-029import java.util.LinkedList;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Optional;
-033import java.util.Set;
-034import 
java.util.concurrent.CompletableFuture;
-035import java.util.concurrent.TimeUnit;
-036import 
java.util.concurrent.atomic.AtomicReference;
-037import java.util.function.BiConsumer;
-038import java.util.function.Function;
-039import java.util.regex.Pattern;
-040import java.util.stream.Collectors;
-041import java.util.stream.Stream;
-042
-043import org.apache.commons.io.IOUtils;
-044import org.apache.commons.logging.Log;
-045import 
org.apache.commons.logging.LogFactory;
-046import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-047import 
org.apache.hadoop.hbase.ClusterStatus;
-048import 
org.apache.hadoop.hbase.ClusterStatus.Option;
-049import 
org.apache.hadoop.hbase.HConstants;
-050import 
org.apache.hadoop.hbase.HRegionLocation;
-051import 
org.apache.hadoop.hbase.MetaTableAccessor;
-052import 
org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-053import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-054import 
org.apache.hadoop.hbase.RegionLoad;
-055import 
org.apache.hadoop.hbase.RegionLocations;
-056import 
org.apache.hadoop.hbase.ServerName;
-057import 
org.apache.hadoop.hbase.TableExistsException;
-058import 
org.apache.hadoop.hbase.TableName;
-059import 
org.apache.hadoop.hbase.TableNotDisabledException;
-060import 
org.apache.hadoop.hbase.TableNotEnabledException;
-061import 
org.apache.hadoop.hbase.TableNotFoundException;
-062import 
org.apache.hadoop.hbase.UnknownRegionException;
-063import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-064import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-065import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.ServerRequestCallerBuilder;
-066import 
org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallable;
-067import 
org.apache.hadoop.hbase.client.Scan.ReadType;
-068import 
org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-069import 
org.apache.hadoop.hbase.client.replication.TableCFs;
-070import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-071import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-072import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-073import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-074import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-075import 
org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-076import 
org.apache.hadoop.hbase.replication.ReplicationException;
-077import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-078import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-079import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-080import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-081import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-082import 
org.apache.hadoop.hbase.util.Bytes;
-083import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-084import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-085import 
org.apache.hadoop.hbase.util.Pair;
-086import 
org.apache.yetus.audience.InterfaceAudience;
-087
-088import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-089import 
org.apache.hadoop.hbase.shaded.io.netty.util.Timeout;
-090import 
org.apache.hadoop.hbase.shaded.io.netty.util.TimerTask;
-091import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-092import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-093import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-094import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
-095import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesResponse;
-096import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-097import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-098import 

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/315ffef7/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/package-use.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/package-use.html 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/package-use.html
deleted file mode 100644
index 1fc98d9..000
--- 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/package-use.html
+++ /dev/null
@@ -1,208 +0,0 @@
-Uses of Package org.apache.hadoop.hbase.codec.prefixtree.decode (Apache HBase 3.0.0-SNAPSHOT API)
-
-Packages that use org.apache.hadoop.hbase.codec.prefixtree.decode:
-  org.apache.hadoop.hbase.codec.prefixtree
-  org.apache.hadoop.hbase.codec.prefixtree.decode
-
-Classes in org.apache.hadoop.hbase.codec.prefixtree.decode used by org.apache.hadoop.hbase.codec.prefixtree:
-  PrefixTreeArraySearcher: Searcher extends the capabilities of the Scanner + ReversibleScanner to
-  add the ability to position itself on a requested Cell without scanning through cells before it.
-
-Classes in org.apache.hadoop.hbase.codec.prefixtree.decode used by org.apache.hadoop.hbase.codec.prefixtree.decode:
-  ArraySearcherPool: Pools PrefixTreeArraySearcher objects.
-  PrefixTreeArrayReversibleScanner: Methods for going backwards through a PrefixTree block.
-  PrefixTreeArrayScanner: Extends PtCell and manipulates its protected fields.
-  PrefixTreeArraySearcher: Searcher extends the capabilities of the Scanner + ReversibleScanner to
-  add the ability to position itself on a requested Cell without scanning through cells before it.
-  PrefixTreeCell: As the PrefixTreeArrayScanner moves through the tree bytes, it changes the
-  values in the fields of this class so that Cell logic can be applied, but without allocating new
-  memory for every Cell iterated through.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/315ffef7/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/row/RowNodeReader.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/row/RowNodeReader.html
 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/row/RowNodeReader.html
deleted file mode 100644
index d3097c4..000
--- 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/row/RowNodeReader.html
+++ /dev/null
@@ -1,1002 +0,0 @@
-RowNodeReader (Apache HBase 3.0.0-SNAPSHOT API)

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
index cece735..cb909d9 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
@@ -28,446 +28,458 @@
 020import static 
org.apache.hadoop.hbase.client.coprocessor.AggregationHelper.getParsedGenericInstance;
 021import static 
org.apache.hadoop.hbase.client.coprocessor.AggregationHelper.validateArgAndGetPB;
 022
-023import java.io.IOException;
-024import java.util.Map;
-025import java.util.NavigableMap;
-026import java.util.NavigableSet;
-027import 
java.util.NoSuchElementException;
-028import java.util.TreeMap;
-029import 
java.util.concurrent.CompletableFuture;
-030
-031import org.apache.hadoop.hbase.Cell;
-032import 
org.apache.hadoop.hbase.client.RawAsyncTable;
-033import 
org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallback;
-034import 
org.apache.hadoop.hbase.client.RawScanResultConsumer;
-035import 
org.apache.hadoop.hbase.client.RegionInfo;
-036import 
org.apache.hadoop.hbase.client.Result;
-037import 
org.apache.hadoop.hbase.client.Scan;
-038import 
org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
-039import 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest;
-040import 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse;
-041import 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;
-042import 
org.apache.hadoop.hbase.util.Bytes;
-043import 
org.apache.hadoop.hbase.util.ReflectionUtils;
-044import 
org.apache.yetus.audience.InterfaceAudience;
-045
-046import com.google.protobuf.Message;
-047
-048/**
-049 * This client class is for invoking the aggregate functions deployed on the Region Server side via
-050 * the AggregateService. This class will implement the supporting functionality for
-051 * summing/processing the individual results obtained from the AggregateService for each region.
-052 */
-053@InterfaceAudience.Public
-054public class AsyncAggregationClient {
-055
-056  private static abstract class AbstractAggregationCallback<T>
-057      implements CoprocessorCallback<AggregateResponse> {
-058
-059    private final CompletableFuture<T> future;
-060
-061    protected boolean finished = false;
-062
-063    private void completeExceptionally(Throwable error) {
-064      if (finished) {
-065        return;
-066      }
-067      finished = true;
-068      future.completeExceptionally(error);
-069    }
-070
-071    protected AbstractAggregationCallback(CompletableFuture<T> future) {
-072      this.future = future;
-073    }
-074
-075    @Override
-076    public synchronized void onRegionError(RegionInfo region, Throwable error) {
-077      completeExceptionally(error);
-078    }
-079
-080    @Override
-081    public synchronized void onError(Throwable error) {
-082      completeExceptionally(error);
-083    }
-084
-085    protected abstract void aggregate(RegionInfo region, AggregateResponse resp)
-086        throws IOException;
-087
-088    @Override
-089    public synchronized void onRegionComplete(RegionInfo region, AggregateResponse resp) {
-090      try {
-091        aggregate(region, resp);
-092      } catch (IOException e) {
-093        completeExceptionally(e);
-094      }
-095    }
-096
-097    protected abstract T getFinalResult();
-098
-099    @Override
-100    public synchronized void onComplete() {
-101      if (finished) {
-102        return;
-103      }
-104      finished = true;
-105      future.complete(getFinalResult());
-106    }
-107  }
-108
-109  private static <R, S, P extends Message, Q extends Message, T extends Message> R
-110      getCellValueFromProto(ColumnInterpreter<R, S, P, Q, T> ci, AggregateResponse resp,
-111      int firstPartIndex) throws IOException {
-112    Q q = getParsedGenericInstance(ci.getClass(), 3, resp.getFirstPart(firstPartIndex));
-113    return ci.getCellValueFromProto(q);
-114  }
-115
-116  private static <R, S, P extends Message, Q extends Message, T extends Message> S
-117      getPromotedValueFromProto(ColumnInterpreter<R, S, P, Q, T> ci, AggregateResponse resp,
-118      int firstPartIndex) throws IOException {
-119    T t = getParsedGenericInstance(ci.getClass(), 4, resp.getFirstPart(firstPartIndex));
-120    return ci.getPromotedValueFromProto(t);
-121  }
-122
-123  public static <R, S, P extends Message, Q extends Message, T extends Message> CompletableFuture<R>
-124      max(RawAsyncTable table, ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) {
-125

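Stripped of HBase types, the callback above is a small reusable shape: fold per-region partial results under a lock, and complete a CompletableFuture exactly once, whether by result or by error. A hedged sketch of just that shape, with RegionResult standing in for AggregateResponse:

import java.util.concurrent.CompletableFuture;

// Hedged sketch of AbstractAggregationCallback's structure; types are generic
// stand-ins, not the HBase coprocessor API.
abstract class AggregationCallback<T, RegionResult> {
  private final CompletableFuture<T> future;
  private boolean finished = false; // guards against double completion

  AggregationCallback(CompletableFuture<T> future) {
    this.future = future;
  }

  protected abstract void aggregate(RegionResult partial); // fold one region's answer
  protected abstract T finalResult();                      // combine after all regions

  public synchronized void onRegionComplete(RegionResult partial) {
    try {
      aggregate(partial);
    } catch (RuntimeException e) {
      onError(e); // a bad partial fails the whole aggregation
    }
  }

  public synchronized void onError(Throwable error) {
    if (!finished) {
      finished = true;
      future.completeExceptionally(error);
    }
  }

  public synchronized void onComplete() {
    if (!finished) {
      finished = true;
      future.complete(finalResult());
    }
  }
}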
[12/51] [partial] hbase-site git commit: Published site at .

2017-11-01 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/93ae3fc9/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
index e1232ef..057eb53 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
@@ -735,7 +735,7 @@ implements 
-boolean
+void
 preBalanceSwitch(ObserverContext<MasterCoprocessorEnvironment> c,
     boolean newValue)
 Called prior to modifying the flag used to enable/disable region balancing.
@@ -893,8 +893,7 @@ implements
 preDeleteNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx,
   String namespace)
 Called before HMaster deletes a
- namespace
- It can't bypass the default action, e.g., ctx.bypass() won't have effect.
+ namespace
 
 
 
@@ -1219,11 +1218,12 @@ implements 
-boolean
+void
 preSetSplitOrMergeEnabled(ObserverContext<MasterCoprocessorEnvironment> ctx,
     boolean newValue,
     MasterSwitchType switchType)
-Called prior to setting split / merge switch
+Called prior to setting split / merge switch
+ Supports Coprocessor 'bypass'.
 
 
 
@@ -2260,8 +2260,7 @@ implements IOException
 Description copied from interface: MasterObserver
 Called before HMaster deletes a
- table.  Called as part of delete table RPC call.
- It can't bypass the default action, e.g., ctx.bypass() won't have effect.
+ table.  Called as part of delete table RPC call.
 
 Specified by:
 preDeleteTable in interface MasterObserver
@@ -2307,8 +2306,7 @@ implements IOException
 Description copied from interface: MasterObserver
 Called before HMaster truncates a
- table.  Called as part of truncate table RPC call.
- It can't bypass the default action, e.g., ctx.bypass() won't have effect.
+ table.  Called as part of truncate table RPC call.
 
 Specified by:
 preTruncateTable in interface MasterObserver
@@ -2357,8 +2355,7 @@ implements IOException
 Description copied from interface: MasterObserver
 Called prior to modifying a table's properties.  Called as part of modify
- table RPC call.
- It can't bypass the default action, e.g., ctx.bypass() won't have effect.
+ table RPC call.
 
 Specified by:
 preModifyTable in interface MasterObserver
@@ -2406,8 +2403,7 @@ implements TableName tableName)
 throws IOException
 Description copied from interface: MasterObserver
-Called prior to enabling a table.  Called as part of enable table RPC call.
- It can't bypass the default action, e.g., ctx.bypass() won't have effect.
+Called prior to enabling a table.  Called as part of enable table RPC call.
 
 Specified by:
 preEnableTable in interface MasterObserver
@@ -2430,8 +2426,7 @@ implements IOException
 Description copied from interface: MasterObserver
 Called prior to disabling a table.  Called as part of disable table RPC
- call.
- It can't bypass the default action, e.g., ctx.bypass() won't have effect.
+ call.
 
 Specified by:
 preDisableTable in interface MasterObserver
@@ -2606,8 +2601,7 @@ implements RegionInfo regionInfo)
   throws IOException
 Description copied from interface: MasterObserver
-Called prior to marking a given region as offline. ctx.bypass() will not have any
- impact on this hook.
+Called prior to marking a given region as offline.
 
 Specified by:
 preRegionOffline in interface MasterObserver
@@ -2624,12 +2618,13 @@ implements 
 
 preSetSplitOrMergeEnabled
-public boolean preSetSplitOrMergeEnabled(ObserverContext<MasterCoprocessorEnvironment> ctx,
-    boolean newValue,
-    MasterSwitchType switchType)
-  throws IOException

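The signature change above (the boolean return is dropped) reflects the newer bypass idiom in this diff: an observer that wants to veto the switch change calls ctx.bypass() instead of returning false. A hedged sketch against the 2.0-era coprocessor API; package names and default-method details are assumptions and may differ across versions:

import java.io.IOException;
import org.apache.hadoop.hbase.client.MasterSwitchType;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

// Hedged sketch of the void-returning hook from the diff above.
public class FreezeSplitSwitch implements MasterObserver {
  @Override
  public void preSetSplitOrMergeEnabled(ObserverContext<MasterCoprocessorEnvironment> ctx,
      boolean newValue, MasterSwitchType switchType) throws IOException {
    if (!newValue) {
      // Illustrative policy: refuse to disable splits/merges cluster-wide.
      ctx.bypass(); // skip the default action instead of returning false
    }
  }
}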
[12/51] [partial] hbase-site git commit: Published site at .

2017-10-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/35decbe4/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
index c27d983..7a9fca5 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Dependency Information
 
@@ -147,7 +147,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-10-29
+  Last Published: 
2017-10-31
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/35decbe4/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
index 7c7a87a..590764a 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Project Dependency Management
 
@@ -784,7 +784,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-10-29
+  Last Published: 
2017-10-31
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/35decbe4/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
index cf3eaef..7be121f 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
About
 
@@ -119,7 +119,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-10-29
+  Last Published: 
2017-10-31
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/35decbe4/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
index a3ecf3a..c1dc8ab 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
CI Management
 
@@ -126,7 +126,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-10-29
+  Last Published: 
2017-10-31
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/35decbe4/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/issue-tracking.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/issue-tracking.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/issue-tracking.html
index cf0db1e..15da6dc 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/issue-tracking.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/issue-tracking.html
@@ -7,7 +7,7 @@
   
 

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1d9053bc/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
index 8d0d3dc..df59ed3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/security/visibility/VisibilityController.html
@@ -493,10 +493,9 @@ implements 
-RegionScanner
-preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e,
-  Scan scan,
-  RegionScanner s)
+void
+preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e,
+  Scan scan)
 Called before the client opens a new scanner.
 
 
@@ -573,7 +572,7 @@ implements RegionObserver
-postAppend,
 postBatchMutate,
 postBatchMutateIndispensably,
 postBulkLoadHFile,
 postCheckAndDelete,
 postCheckAndPut,
 postClose,
 postCloseRegionOperation, postCommitStoreFile,
 postCompact,
 postCompactSelection,
 postDelete,
 postExists,
 postFlush,
 postFlush,
 postGetOp,
 postIncrement,
 postPut,
 postReplayWALs,
 postScannerNext,
 postStartRegionOperation,
 postStoreFileReaderOpen, postWALRestore,
 preAppendAfterRowLock,
 preBulkLoadHFile,
 preCheckAndDelete, preCheckAndDeleteAfterRowLock,
 preCheckAndPut,
 preCheckAndPutAfterRowLock,
 preClose,
 preCommitStoreFile,
 preCompact,
 preCompactSelection,
 preDelete,
 preExists,
 preFlush,
 preFlush,
 preIncrementAfterRowLock,
 preOpen,
 prePut,
 preReplayWALs,
 preStoreFileReaderOpen,
 preWALRestore
+postAppend,
 postBatchMutate,
 postBatchMutateIndispensably,
 postBulkLoadHFile,
 postCheckAndDelete,
 postCheckAndPut,
 postClose,
 postCloseRegionOperation, postCommitStoreFile,
 postCompact,
 postCompactSelection,
 postDelete,
 postExists,
 postFlush,
 postFlush,
 postGetOp,
 postIncrement,
 postPut,
 postReplayWALs,
 postScannerNext,
 postStartRegionOperation,
 postStoreFileReaderOpen, postWALRestore,
 preAppendAfterRowLock,
 preBulkLoadHFile,
 preCheckAndDelete, preCheckAndDeleteAfterRowLock,
 preCheckAndPut,
 preCheckAndPutAfterRowLock,
 preClose,
 preCommitStoreFile,
 preCompact,
 preCompactScannerOpen,
 preCompactSelection,
 preDelete,
 preExists,
 preFlush,
 preFlush,
 preFlushScannerOpen, preIncrementAfterRowLock,
 preOpen,
 prePut,
 preReplayWALs,
 preStoreFileReaderOpen,
 preStoreScannerOpen,
 preWALRestore
 
 
 
@@ -1096,21 +1095,18 @@ implements 
+
 
 
 
 
 preScannerOpen
-public RegionScanner preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e,
-    Scan scan,
-    RegionScanner s)
-  throws IOException
-Description copied from interface: RegionObserver
+public void preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e,
+    Scan scan)
+  throws IOException
+Description copied from interface: RegionObserver
 Called before the client opens a new scanner.
 
- Call CoprocessorEnvironment#bypass to skip default actions
-
 Call CoprocessorEnvironment#complete to skip any subsequent chained
 coprocessors
 
@@ -1118,14 +1114,10 @@ implements Specified by:
-preScannerOpen in interface RegionObserver
+preScannerOpen in interface RegionObserver
 Parameters:
 e - the environment provided by the region server
 scan - the Scan specification
-s - if not null, the base scanner
-Returns:
-an RegionScanner instance to use instead of the base scanner if
- overriding default behavior, null otherwise
 Throws:
 IOException
 
@@ -1137,7 +1129,7 @@ implements 
 
 postInstantiateDeleteTracker
-public DeleteTracker postInstantiateDeleteTracker(ObserverContext<RegionCoprocessorEnvironment> ctx,
+public DeleteTracker postInstantiateDeleteTracker(ObserverContext<RegionCoprocessorEnvironment> ctx,
    DeleteTracker delTracker)
 throws IOException
 Description copied from interface: RegionObserver
@@ -1165,7 +1157,7 @@ implements 
 
 postScannerOpen
-public RegionScanner postScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
+public RegionScanner postScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
   Scan scan,
   RegionScanner s)
  RegionScanners)
 

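With the RegionScanner parameter and return value gone from preScannerOpen above, an observer can no longer substitute its own scanner at this hook; it adjusts the Scan in place instead. A hedged sketch of that usage; the caching cap is an illustrative policy, not anything HBase enforces:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;

// Hedged sketch of the reduced hook from the diff above (2.0-era API assumed).
public class CappedScanObserver implements RegionObserver {
  @Override
  public void preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e, Scan scan)
      throws IOException {
    // Illustrative policy: cap rows fetched per RPC batch by mutating the Scan.
    if (scan.getCaching() > 10_000 || scan.getCaching() <= 0) {
      scan.setCaching(10_000);
    }
  }
}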
[12/51] [partial] hbase-site git commit: Published site at .

2017-10-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5018ccb3/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
new file mode 100644
index 000..c708a8c
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
@@ -0,0 +1,448 @@
+PrivateCellUtil.FirstOnRowByteBufferCell (Apache HBase 3.0.0-SNAPSHOT API)
+
+org.apache.hadoop.hbase
+Class PrivateCellUtil.FirstOnRowByteBufferCell
+
+java.lang.Object
+  org.apache.hadoop.hbase.ByteBufferCell
+    org.apache.hadoop.hbase.PrivateCellUtil.EmptyByteBufferCell
+      org.apache.hadoop.hbase.PrivateCellUtil.FirstOnRowByteBufferCell
+
+All Implemented Interfaces:
+Cell, SettableSequenceId
+
+Direct Known Subclasses:
+PrivateCellUtil.FirstOnRowColByteBufferCell
+
+Enclosing class:
+PrivateCellUtil
+
+private static class PrivateCellUtil.FirstOnRowByteBufferCell
+extends PrivateCellUtil.EmptyByteBufferCell
+
+Field Summary
+
+Modifier and Type    Field and Description
+private short        rlength
+private int          roffset
+private ByteBuffer   rowBuff
+
+Constructor Summary
+
+FirstOnRowByteBufferCell(ByteBuffer row,
+    int roffset,
+    short rlength)
+
+Method Summary
+
+All Methods  Instance Methods  Concrete Methods
+
+Modifier and Type    Method and Description
+ByteBuffer           getRowByteBuffer()
+short                getRowLength()
+int                  getRowPosition()
+long                 getTimestamp()
+byte                 getTypeByte()
+
+Methods inherited from class org.apache.hadoop.hbase.PrivateCellUtil.EmptyByteBufferCell
+getFamilyArray, getFamilyByteBuffer, getFamilyLength, getFamilyOffset, getFamilyPosition,
+getQualifierArray, getQualifierByteBuffer, getQualifierLength, getQualifierOffset,
+getQualifierPosition, getRowArray, getRowOffset, getSequenceId, getTagsArray,
+getTagsByteBuffer, getTagsLength, getTagsOffset, getTagsPosition, getValueArray,
+getValueByteBuffer, getValueLength, getValueOffset, getValuePosition, setSequenceId
+
+Methods inherited from class java.lang.Object
+clone, equals, finalize, getClass, hashCode,

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/00c22388/devapidocs/org/apache/hadoop/hbase/client/Table.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Table.html 
b/devapidocs/org/apache/hadoop/hbase/client/Table.html
index 0f1320b..b77757a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Table.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Table.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public interface Table
+public interface Table
 extends Closeable
 Used to communicate with a single HBase table.
 Obtain an instance from a Connection and call close() afterwards.
@@ -383,44 +383,69 @@ extends Closeable
 
 int
 getOperationTimeout()
-Get timeout (millisecond) of each operation for in Table instance.
+Deprecated.
+since 2.0 and will be removed in 3.0 version
+ use getOperationTimeout(TimeUnit) instead
 
+long
+getOperationTimeout(TimeUnit unit)
+Get timeout of each operation in Table instance.
+
 int
 getReadRpcTimeout()
-Get timeout (millisecond) of each rpc read request in this Table instance.
+Deprecated.
+since 2.0 and will be removed in 3.0 version
+ use getReadRpcTimeout(TimeUnit) instead
 
+long
+getReadRpcTimeout(TimeUnit unit)
+Get timeout of each rpc read request in this Table instance.
+
 int
 getRpcTimeout()
 Deprecated.
-Use getReadRpcTimeout or getWriteRpcTimeout instead
+use getReadRpcTimeout(TimeUnit) or
+ getWriteRpcTimeout(TimeUnit) instead
 
+long
+getRpcTimeout(TimeUnit unit)
+Get timeout of each rpc request in this Table instance.
+
 ResultScanner
 getScanner(byte[] family)
 Gets a scanner on the current table for the given family.
 
 ResultScanner
 getScanner(byte[] family,
   byte[] qualifier)
 Gets a scanner on the current table for the given family and qualifier.
 
 ResultScanner
 getScanner(Scan scan)
 Returns a scanner on the current table as specified by the Scan object.
 
 HTableDescriptor
 getTableDescriptor()
 Deprecated.
@@ -429,19 +454,28 @@ extends Closeable
 
 int
 getWriteRpcTimeout()
-Get timeout (millisecond) of each rpc write request in this Table instance.
+Deprecated.
+since 2.0 and will be removed in 3.0 version
+ use getWriteRpcTimeout(TimeUnit) instead
 
+long
+getWriteRpcTimeout(TimeUnit unit)
+Get timeout of each rpc write request in this Table instance.
+
 Result
 increment(Increment increment)
 Increments one or more columns within a single row.
 
 long
 incrementColumnValue(byte[] row,
     byte[] family,
@@ -450,7 +484,7 @@ extends Closeable
 See incrementColumnValue(byte[], byte[], byte[], long, Durability)
 
 long
 incrementColumnValue(byte[] row,
     byte[] family,
@@ -460,25 +494,25 @@ extends Closeable
 Atomically increments a column value.
 
 void
 mutateRow(RowMutations rm)
 Performs multiple mutations
[12/51] [partial] hbase-site git commit: Published site at .
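The diff above replaces the millisecond-only int timeout getters with TimeUnit-based long getters. A hedged usage sketch of the replacement API as it appears in this diff; the "demo" table name is illustrative:

import java.io.IOException;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;

// Hedged sketch of the TimeUnit-based getters introduced by the diff above.
public class TimeoutProbe {
  static void logTimeouts(Connection connection) throws IOException {
    try (Table table = connection.getTable(TableName.valueOf("demo"))) {
      // Callers now pick the unit instead of assuming milliseconds.
      long opMs = table.getOperationTimeout(TimeUnit.MILLISECONDS);
      long readMs = table.getReadRpcTimeout(TimeUnit.MILLISECONDS);
      long writeMs = table.getWriteRpcTimeout(TimeUnit.MILLISECONDS);
      System.out.println("operation=" + opMs + "ms read=" + readMs
          + "ms write=" + writeMs + "ms");
    }
  }
}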

2017-10-25 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index c9aa50a..aa7cf49 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HRegionServer.CompactionChecker
+private static class HRegionServer.CompactionChecker
 extends ScheduledChore
 
 
@@ -233,7 +233,7 @@ extends 
 
 instance
-private final HRegionServer instance
+private final HRegionServer instance
 
 
 
@@ -242,7 +242,7 @@ extends 
 
 majorCompactPriority
-private final int majorCompactPriority
+private final int majorCompactPriority
 
 
 
@@ -251,7 +251,7 @@ extends 
 
 DEFAULT_PRIORITY
-private static final int DEFAULT_PRIORITY
+private static final int DEFAULT_PRIORITY
 
 See Also:
 Constant
 Field Values
@@ -264,7 +264,7 @@ extends 
 
 iteration
-private long iteration
+private long iteration
 
 
 
@@ -281,7 +281,7 @@ extends 
 
 CompactionChecker
-CompactionChecker(HRegionServer h,
+CompactionChecker(HRegionServer h,
   int sleepTime,
   Stoppable stopper)
 
@@ -300,7 +300,7 @@ extends 
 
 chore
-protected void chore()
+protected void chore()
 Description copied from 
class:ScheduledChore
 The task to execute on each scheduled execution of the 
Chore
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index baa9abf..af22eb0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HRegionServer.MovedRegionInfo
+private static class HRegionServer.MovedRegionInfo
 extends java.lang.Object
 
 
@@ -218,7 +218,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 serverName
-private final ServerName serverName
+private final ServerName serverName
 
 
 
@@ -227,7 +227,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 seqNum
-private final long seqNum
+private final long seqNum
 
 
 
@@ -236,7 +236,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 ts
-private finallong ts
+private finallong ts
 
 
 
@@ -253,7 +253,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 MovedRegionInfo
-publicMovedRegionInfo(ServerNameserverName,
+publicMovedRegionInfo(ServerNameserverName,
longcloseSeqNum)
 
 
@@ -271,7 +271,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getServerName
-publicServerNamegetServerName()
+publicServerNamegetServerName()
 
 
 
@@ -280,7 +280,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getSeqNum
-publiclonggetSeqNum()
+publiclonggetSeqNum()
 
 
 
@@ -289,7 +289,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getMoveTime
-publiclonggetMoveTime()
+publiclonggetMoveTime()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21726f5a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index f1e7550..9303410 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static final class HRegionServer.MovedRegionsCleaner
+protected static final class HRegionServer.MovedRegionsCleaner
 extends ScheduledChore
 implements Stoppable
 Creates a Chore thread to clean the moved region 
cache.
@@ -242,7 +242,7 @@ implements 
 
 regionServer
-private HRegionServer regionServer
+private HRegionServer regionServer
 
 
 
@@ -251,7 +251,7 @@ implements 
 
 

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8847591c/devapidocs/org/apache/hadoop/hbase/regionserver/HStoreFile.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStoreFile.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HStoreFile.html
index 26c6467..e9885e0 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStoreFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStoreFile.html
@@ -1265,7 +1265,7 @@ implements 
 
 initReader
-public void initReader()
+public void initReader()
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Initialize the reader used for pread.
 
@@ -1280,7 +1280,7 @@ implements 
 
 createStreamReader
-private StoreFileReader createStreamReader(boolean canUseDropBehind)
+private StoreFileReader createStreamReader(boolean canUseDropBehind)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -1294,7 +1294,7 @@ implements 
 
 getPreadScanner
-public StoreFileScanner getPreadScanner(boolean cacheBlocks,
+public StoreFileScanner getPreadScanner(boolean cacheBlocks,
 long readPt,
 long scannerOrder,
 boolean canOptimizeForNonNullColumn)
@@ -1309,7 +1309,7 @@ implements 
 
 getStreamScanner
-public StoreFileScanner getStreamScanner(boolean canUseDropBehind,
+public StoreFileScanner getStreamScanner(boolean canUseDropBehind,
  boolean cacheBlocks,
  boolean isCompaction,
  long readPt,
@@ -1331,7 +1331,7 @@ implements 
 
 getReader
-public StoreFileReader getReader()
+public StoreFileReader getReader()
 
 Returns:
 Current reader. Must call initReader first else returns null.
@@ -1346,7 +1346,7 @@ implements 
 
 closeStoreFile
-public void closeStoreFile(boolean evictOnClose)
+public void closeStoreFile(boolean evictOnClose)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Parameters:
@@ -1362,7 +1362,7 @@ implements 
 
 deleteStoreFile
-public void deleteStoreFile()
+public void deleteStoreFile()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Delete this file
 
@@ -1377,7 +1377,7 @@ implements 
 
 markCompactedAway
-public void markCompactedAway()
+public void markCompactedAway()
 
 
 
@@ -1386,7 +1386,7 @@ implements 
 
 toString
-public String toString()
+public String toString()
 
 Overrides:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
 title="class or interface in java.lang">toStringin 
classhttp://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
@@ -1399,7 +1399,7 @@ implements 
 
 toStringDetailed
-public String toStringDetailed()
+public String toStringDetailed()
 
 Specified by:
 toStringDetailedin
 interfaceStoreFile
@@ -1414,7 +1414,7 @@ implements 
 
 isSkipResetSeqId
-private boolean isSkipResetSeqId(byte[] skipResetSeqId)
+private boolean isSkipResetSeqId(byte[] skipResetSeqId)
 Gets whether to skip resetting the sequence id for 
cells.
 
 Parameters:
@@ -1430,7 +1430,7 @@ implements 
 
 getMinimumTimestamp
-public OptionalLong getMinimumTimestamp()
+public OptionalLong getMinimumTimestamp()
 Description copied from 
interface:StoreFile
 Get the min timestamp of all the cells in the store 
file.
 
@@ -1445,7 +1445,7 @@ implements 
 
 getMaximumTimestamp
-public OptionalLong getMaximumTimestamp()
+public OptionalLong getMaximumTimestamp()

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/41a7fcc5/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
index 12fe16f..b1e0997 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
@@ -1960,6279 +1960,6285 @@
 1952  protected void 
doRegionCompactionPrep() throws IOException {
 1953  }
 1954
-1955  @Override
-1956  public void triggerMajorCompaction() 
throws IOException {
-1957
stores.values().forEach(HStore::triggerMajorCompaction);
-1958  }
-1959
-1960  /**
-1961   * Synchronously compact all stores in 
the region.
-1962   * pThis operation could block 
for a long time, so don't call it from a
-1963   * time-sensitive thread.
-1964   * pNote that no locks are 
taken to prevent possible conflicts between
-1965   * compaction and splitting 
activities. The regionserver does not normally compact
-1966   * and split in parallel. However by 
calling this method you may introduce
-1967   * unexpected and unhandled 
concurrency. Don't do this unless you know what
-1968   * you are doing.
-1969   *
-1970   * @param majorCompaction True to 
force a major compaction regardless of thresholds
-1971   * @throws IOException
-1972   */
-1973  public void compact(boolean 
majorCompaction) throws IOException {
-1974if (majorCompaction) {
-1975  triggerMajorCompaction();
-1976}
-1977for (HStore s : stores.values()) {
-1978  OptionalCompactionContext 
compaction = s.requestCompaction();
-1979  if (compaction.isPresent()) {
-1980ThroughputController controller 
= null;
-1981if (rsServices != null) {
-1982  controller = 
CompactionThroughputControllerFactory.create(rsServices, conf);
-1983}
-1984if (controller == null) {
-1985  controller = 
NoLimitThroughputController.INSTANCE;
-1986}
-1987compact(compaction.get(), s, 
controller, null);
-1988  }
-1989}
-1990  }
-1991
-1992  /**
-1993   * This is a helper function that 
compact all the stores synchronously.
-1994   * p
-1995   * It is used by utilities and 
testing
-1996   */
-1997  @VisibleForTesting
-1998  public void compactStores() throws 
IOException {
-1999for (HStore s : stores.values()) {
-2000  OptionalCompactionContext 
compaction = s.requestCompaction();
-2001  if (compaction.isPresent()) {
-2002compact(compaction.get(), s, 
NoLimitThroughputController.INSTANCE, null);
-2003  }
-2004}
-2005  }
-2006
-2007  /**
-2008   * This is a helper function that 
compact the given store.
-2009   * p
-2010   * It is used by utilities and 
testing
-2011   */
-2012  @VisibleForTesting
-2013  void compactStore(byte[] family, 
ThroughputController throughputController) throws IOException {
-2014HStore s = getStore(family);
-2015OptionalCompactionContext 
compaction = s.requestCompaction();
-2016if (compaction.isPresent()) {
-2017  compact(compaction.get(), s, 
throughputController, null);
-2018}
-2019  }
-2020
-2021  /**
-2022   * Called by compaction thread and 
after region is opened to compact the
-2023   * HStores if necessary.
-2024   *
-2025   * pThis operation could block 
for a long time, so don't call it from a
-2026   * time-sensitive thread.
-2027   *
-2028   * Note that no locking is necessary 
at this level because compaction only
-2029   * conflicts with a region split, and 
that cannot happen because the region
-2030   * server does them sequentially and 
not in parallel.
-2031   *
-2032   * @param compaction Compaction 
details, obtained by requestCompaction()
-2033   * @param throughputController
-2034   * @return whether the compaction 
completed
-2035   */
+1955  /**
+1956   * Synchronously compact all stores in 
the region.
+1957   * pThis operation could block 
for a long time, so don't call it from a
+1958   * time-sensitive thread.
+1959   * pNote that no locks are 
taken to prevent possible conflicts between
+1960   * compaction and splitting 
activities. The regionserver does not normally compact
+1961   * and split in parallel. However by 
calling this method you may introduce
+1962   * unexpected and unhandled 
concurrency. Don't do this unless you know what
+1963   * you are doing.
+1964   *
+1965   * @param majorCompaction True to 
force a major compaction regardless of thresholds
+1966   * @throws IOException
+1967   */
+1968  public void compact(boolean 
majorCompaction) throws IOException {
+1969if (majorCompaction) {
+1970  
stores.values().forEach(HStore::triggerMajorCompaction);
+1971}
+1972for (HStore s : stores.values()) {
+1973  
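Reduced to its core, the rewritten compact(boolean) follows the same per-store pattern as the compactStores() helper earlier in this hunk. Reassembled from the hunk with the HTML line-wrapping removed:

  public void compactStores() throws IOException {
    for (HStore s : stores.values()) {
      Optional<CompactionContext> compaction = s.requestCompaction();
      if (compaction.isPresent()) {
        // run under a throughput controller; NoLimit is the fallback
        compact(compaction.get(), s, NoLimitThroughputController.INSTANCE, null);
      }
    }
  }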

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4f94a4c5/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
index 68df449..5e45072 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
@@ -6,7 +6,7 @@
 
 
 
-001/**
+001/*
 002 * Licensed to the Apache Software 
Foundation (ASF) under one
 003 * or more contributor license 
agreements.  See the NOTICE file
 004 * distributed with this work for 
additional information
@@ -32,775 +32,772 @@
 024import java.net.InetAddress;
 025import java.net.InetSocketAddress;
 026import java.nio.ByteBuffer;
-027import 
java.nio.channels.GatheringByteChannel;
-028import 
java.nio.channels.ReadableByteChannel;
-029import 
java.nio.channels.WritableByteChannel;
-030import java.util.ArrayList;
-031import java.util.Collections;
-032import java.util.HashMap;
-033import java.util.List;
-034import java.util.Locale;
-035import java.util.Map;
-036import java.util.Optional;
-037import 
java.util.concurrent.atomic.LongAdder;
-038
-039import 
com.fasterxml.jackson.databind.ObjectMapper;
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.conf.Configuration;
-043import 
org.apache.hadoop.hbase.CallQueueTooBigException;
-044import 
org.apache.hadoop.hbase.CellScanner;
-045import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-046import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-047import 
org.apache.hadoop.hbase.HConstants;
-048import org.apache.hadoop.hbase.Server;
-049import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-050import 
org.apache.hadoop.hbase.exceptions.RequestTooBigException;
-051import 
org.apache.hadoop.hbase.io.ByteBufferPool;
-052import 
org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
-053import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-054import 
org.apache.hadoop.hbase.nio.ByteBuff;
-055import 
org.apache.hadoop.hbase.nio.MultiByteBuff;
-056import 
org.apache.hadoop.hbase.nio.SingleByteBuff;
-057import 
org.apache.hadoop.hbase.regionserver.RSRpcServices;
-058import 
org.apache.hadoop.hbase.security.SaslUtil;
-059import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-060import 
org.apache.hadoop.hbase.security.User;
-061import 
org.apache.hadoop.hbase.security.UserProvider;
-062import 
org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager;
-063import 
org.apache.hadoop.hbase.util.Pair;
-064import 
org.apache.hadoop.security.UserGroupInformation;
-065import 
org.apache.hadoop.security.authorize.AuthorizationException;
-066import 
org.apache.hadoop.security.authorize.PolicyProvider;
-067import 
org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
-068import 
org.apache.hadoop.security.token.SecretManager;
-069import 
org.apache.hadoop.security.token.TokenIdentifier;
-070import 
org.apache.yetus.audience.InterfaceAudience;
-071import 
org.apache.yetus.audience.InterfaceStability;
-072
-073import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-074import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
-075import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
-076import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-077import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-078import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-079import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-080import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-081import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
-082
-083/**
-084 * An RPC server that hosts protobuf 
described Services.
-085 *
-086 */
-087@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
 HBaseInterfaceAudience.PHOENIX})
-088@InterfaceStability.Evolving
-089public abstract class RpcServer 
implements RpcServerInterface,
-090ConfigurationObserver {
-091  // LOG is being used in CallRunner and 
the log level is being changed in tests
-092  public static final Log LOG = 
LogFactory.getLog(RpcServer.class);
-093  protected static final 
CallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
-094  = new CallQueueTooBigException();
-095
-096  private final boolean authorize;
-097  protected boolean isSecurityEnabled;
-098
-099  public static final byte 
CURRENT_VERSION = 0;
-100
-101  /**
-102   * Whether we allow a fallback to 
SIMPLE auth for insecure clients when security is enabled.
-103   */

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c0c4a947/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
index df713a8..4b5efa2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
@@ -248,7 +248,7 @@ extends 
 
 Methods inherited from classorg.apache.hadoop.hbase.master.HMaster
-abort,
 abortProcedure,
 addColumn,
 addReplicationPeer,
 balance,
 balance,
 balanceSwitch,
 canCreateBaseZNode, canUpdateTableDescriptor,
 checkIfShouldMoveSystemRegionAsync,
 checkInitialized,
 checkServiceStarted,
 checkTableModifiable,
 configureInfoServer,
 constructMaster, createMetaBootstrap,
 createNamespace,
 createQuotaSnapshotNotifier,
 createRpcServices,
 createServerManager,
 createSystemTable,
 createTable,
 deleteColumn,
 deleteNamespace,
 deleteTable,
 disableReplicationPeer,
 disableTable,
 drainRegionServer,
 enableReplicationPeer,
 enableTable,
 getAssignmentManager,
 getAverageLoad,
 getCatalogJanitor,
 getClientIdAuditPrefix,
 getClusterSchema,
 getClusterStatus,
 getClusterStatus, getDumpServlet,
 getFavoredNodesManager,
 getHFileCleaner,
 getInitializedEvent,
 getLastMajorCompactionTimestamp,
 getLastMajorCompactionTimestampForRegion,
 getLoadBalancer,
 getLoadBalancerClassName,
 getLoadedCoprocessors,
 getLockManager,
 getLocks,
 getLogCleaner,
 getMasterActiveTime,
 getMasterCoprocessorHost,
 getMasterCoprocessors,
 getMasterFileSystem,
 getMasterFinishedInitializationTime,
 getMasterMetrics,
 getMasterProcedureExecutor,
 getMasterProcedureManagerHost,
 getMasterQuotaManager,
 getMasterRpcServices,
 getMasterStartTime,
 getMasterWalManager,
 getMergePlanCount,
 getMetaTableObserver,
 getMobCompactionState,
 getNamespace,
 getNamespaces,
 getNumWALFiles,
 getProcedures,
 getProcessName,
 getQuotaObserverChore, getRegionNormalizer,
 getRegionNormalizerTracker,
 getRegionServerFatalLogBuffer,
 getRegionServerInfoPort,
 getRegionServerVersion,
 getRemoteInetAddress,
 getReplicationPeerConfig, getServerCrashProcessingEnabledEvent,
 getServerManager,
 getServerName,
 getSnapshotManager,
 getSpaceQuotaSnapshotNotifier,
 getSplitOrMergeTracker,
 getSplitPlanCount,
 getTableDescriptors,
 getTableRegionForRow,
 getTableStateManager,
 getWalProcedureStore,
 getZooKeeper,
 initClusterSchemaService,
 initializeZKBasedSystemTrackers,
 initQuotaManager,
 isActiveMaster,
 isBalancerOn, isCatalogJanitorEnabled,
 isCleanerChoreEnabled,
 isInitialized,
 isInMaintenanceMode,
 isNormalizerOn,
 isServerCrashProcessingEnabled,
 isSplitOrMergeEnabled,
 listDrainingRegionServers, listReplicationPeers,
 listTableDescriptors,
 listTableDescriptorsByNamespace,
 listTableNames,
 listTableNamesByNamespace,
 login,
 main, mergeRegions,
 modifyColumn,
 modifyNamespace,
 modifyTable,
 move,
 normalizeRegions,
 recoverMeta,
 registerService,
 removeDrainFromRegionServer,
 removeReplicationPeer,
 reportMobCompactionEnd,
 reportMobCompactionStart,
 r
 equestMobCompaction, restoreSnapshot,
 setCatalogJanitorEnabled,
 setInitialized,
 setServerCrashProcessingEnabled,
 shutdown,
 splitRegion,
 stopMaster,
 stopServiceThreads,
 truncateTable,
 updateConfigurationForSpaceQuotaObserver,
 updateReplicationPeerConfig,
 waitForMasterActive
+abort,
 abortProcedure,
 addColumn,
 addReplicationPeer,
 balance,
 balance,
 balanceSwitch,
 canCreateBaseZNode, canUpdateTableDescriptor,
 checkIfShouldMoveSystemRegionAsync,
 checkInitialized,
 checkServiceStarted,
 checkTableModifiable,
 configureInfoServer,
 constructMaster, createMetaBootstrap,
 createNamespace,
 createQuotaSnapshotNotifier,
 createRpcServices,
 createServerManager,
 createSystemTable,
 createTable,
 decommissionRegionServers,
 deleteColumn,
 deleteNamespace,
 deleteTable,
 disableReplicationPeer,
 disableTable,
 enableReplicationPeer,
 enableTable,
 getAssignmentManager,
 getAverageLoad,
 getCatalogJanitor,
 getClientIdAuditPrefix,
 getClusterSchema,
 getClusterStatus,
 getClusterStatus, getDumpServlet,
 getFavoredNodesManager,
 getHFileCleaner,
 getInitializedEvent,
 getLastMajorCompactionTimestamp,
 getLastMajorCompactionTimestampForRegion,
 getLoadBalancer,
 getLoadBalancerClassName,
 getLoadedCoprocessors,
 getLockManager,
 getLocks,
 getLogCleaner,
 getMasterActiveTime,
 getMasterCoprocessorHost,
 getMasterCoprocessors,
 getMasterFileSystem
 , getMasterFinishedInitializationTime,
 getMasterMetrics,
 getMasterProcedureExecutor,
 getMasterProcedureManagerHost,
 getMasterQuotaManager,
 getMasterRpcServices,
 getMasterStartTime,
 getMasterWalManager,
 getMergePlanCount,
 getMetaTableObserver,
 getMobCompactionState,
 getNamespace,
 getNamespaces,
 getNumWALFiles,
 getProcedures,
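The changed method list reflects the replacement of the drain-server calls (drainRegionServer, listDrainingRegionServers, removeDrainFromRegionServer) with decommission-style ones. A hedged client-side sketch, assuming the corresponding HBase 2.0+ Admin API; verify the exact signatures against the release you target:

  import java.io.IOException;
  import java.util.Arrays;
  import java.util.Collections;
  import org.apache.hadoop.hbase.ServerName;
  import org.apache.hadoop.hbase.client.Admin;

  static void takeServerOffline(Admin admin, ServerName sn) throws IOException {
    // Mark the server as decommissioned and offload its regions (assumed API).
    admin.decommissionRegionServers(Arrays.asList(sn), true);
    // ... perform maintenance ...
    admin.recommissionRegionServer(sn, Collections.emptyList()); // assumed API
  }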
 

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ed0004f8/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.html
 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.html
index 69d3bdc..bb04c33 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.html
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class PrefixTreeCell
+public class PrefixTreeCell
 extends ByteBufferCell
 implements SettableSequenceId, Comparable<Cell>
 As the PrefixTreeArrayScanner moves through the tree bytes, it changes the
@@ -158,7 +158,7 @@ implements 
-protected CellComparator
+protected CellComparator
 comparator
 
 
@@ -461,7 +461,7 @@ implements 
 
 comparator
-protected CellComparator comparator
+protected CellComparator comparator
 
 
 
@@ -470,7 +470,7 @@ implements 
 
 TYPES
-public static final KeyValue.Type[] TYPES
+public static final KeyValue.Type[] TYPES
 static
 
 
@@ -480,7 +480,7 @@ implements 
 
 DEFAULT_TYPE
-public static final KeyValue.Type DEFAULT_TYPE
+public static final KeyValue.Type DEFAULT_TYPE
 
 
 
@@ -489,7 +489,7 @@ implements 
 
 block
-protected ByteBuff block
+protected ByteBuff block
 fields
 
 
@@ -499,7 +499,7 @@ implements 
 
 includeMvccVersion
-protected boolean includeMvccVersion
+protected boolean includeMvccVersion
 
 
 
@@ -508,7 +508,7 @@ implements 
 
 rowBuffer
-protected byte[] rowBuffer
+protected byte[] rowBuffer
 
 
 
@@ -517,7 +517,7 @@ implements 
 
 rowLength
-protected int rowLength
+protected int rowLength
 
 
 
@@ -526,7 +526,7 @@ implements 
 
 familyBuffer
-protected byte[] familyBuffer
+protected byte[] familyBuffer
 
 
 
@@ -535,7 +535,7 @@ implements 
 
 familyOffset
-protected int familyOffset
+protected int familyOffset
 
 
 
@@ -544,7 +544,7 @@ implements 
 
 familyLength
-protected int familyLength
+protected int familyLength
 
 
 
@@ -553,7 +553,7 @@ implements 
 
 qualifierBuffer
-protected byte[] qualifierBuffer
+protected byte[] qualifierBuffer
 
 
 
@@ -562,7 +562,7 @@ implements 
 
 qualifierOffset
-protected int qualifierOffset
+protected int qualifierOffset
 
 
 
@@ -571,7 +571,7 @@ implements 
 
 qualifierLength
-protected int qualifierLength
+protected int qualifierLength
 
 
 
@@ -580,7 +580,7 @@ implements 
 
 timestamp
-protected Long timestamp
+protected Long timestamp
 
 
 
@@ -589,7 +589,7 @@ implements 
 
 mvccVersion
-protected Long mvccVersion
+protected Long mvccVersion
 
 
 
@@ -598,7 +598,7 @@ implements 
 
 type
-protected KeyValue.Type type
+protected KeyValue.Type type
 
 
 
@@ -607,7 +607,7 @@ implements 
 
 absoluteValueOffset
-protected int absoluteValueOffset
+protected int absoluteValueOffset
 
 
 
@@ -616,7 +616,7 @@ implements 
 
 valueLength
-protected int valueLength
+protected int valueLength
 
 
 
@@ -625,7 +625,7 @@ implements 
 
 tagsBuffer
-protected byte[] tagsBuffer
+protected byte[] tagsBuffer
 
 
 
@@ -634,7 +634,7 @@ implements 
 
 tagsOffset
-protected int tagsOffset
+protected int tagsOffset
 
 
 
@@ -643,7 +643,7 @@ implements 
 
 tagsLength
-protected int tagsLength
+protected int tagsLength
 
 
 
@@ -652,7 +652,7 @@ implements 
 
 pair
-protected ObjectIntPair<ByteBuffer> pair
+protected ObjectIntPair<ByteBuffer> pair
 
 
 
@@ -669,7 +669,7 @@ implements 
 
 PrefixTreeCell
-public PrefixTreeCell()
+public PrefixTreeCell()
 
 
 
@@ -686,7 +686,7 @@ implements 
 
 toString
-public String toString()
+public String toString()
 For debugging. Currently creates a new KeyValue to utilize its toString()
 method.
 
@@ -701,7 +701,7 @@ implements 
 
 equals
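The fields above back the standard Cell accessors, so a PrefixTreeCell can be consumed like any other Cell. A small generic sketch of reading a cell's components:

  import org.apache.hadoop.hbase.Cell;
  import org.apache.hadoop.hbase.util.Bytes;

  static String describe(Cell cell) {
    // Each component is an (array, offset, length) view into backing buffers.
    String row = Bytes.toString(cell.getRowArray(),
        cell.getRowOffset(), cell.getRowLength());
    String cf = Bytes.toString(cell.getFamilyArray(),
        cell.getFamilyOffset(), cell.getFamilyLength());
    String cq = Bytes.toString(cell.getQualifierArray(),
        cell.getQualifierOffset(), cell.getQualifierLength());
    return row + "/" + cf + ":" + cq + "@" + cell.getTimestamp();
  }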

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/47abd8e6/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer.html
index 6150454..b712da3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer.html
@@ -391,2559 +391,2494 @@
 383CompletableFutureVoid 
operate(TableName table);
 384  }
 385
-386  private 
CompletableFutureListTableDescriptor 
batchTableOperations(Pattern pattern,
-387  TableOperator operator, String 
operationType) {
-388
CompletableFutureListTableDescriptor future = new 
CompletableFuture();
-389ListTableDescriptor failed = 
new LinkedList();
-390
listTables(Optional.ofNullable(pattern), false).whenComplete(
-391  (tables, error) - {
-392if (error != null) {
-393  
future.completeExceptionally(error);
-394  return;
-395}
-396CompletableFuture[] futures =
-397tables.stream()
-398.map((table) - 
operator.operate(table.getTableName()).whenComplete((v, ex) - {
-399  if (ex != null) {
-400LOG.info("Failed to " 
+ operationType + " table " + table.getTableName(), ex);
-401failed.add(table);
-402  }
-403
})).CompletableFuture toArray(size - new 
CompletableFuture[size]);
-404
CompletableFuture.allOf(futures).thenAccept((v) - {
-405  future.complete(failed);
-406});
-407  });
-408return future;
-409  }
-410
-411  @Override
-412  public CompletableFuture<Boolean> tableExists(TableName tableName) {
-413    return AsyncMetaTableAccessor.tableExists(metaTable, tableName);
+386  @Override
+387  public CompletableFuture<Boolean> tableExists(TableName tableName) {
+388    return AsyncMetaTableAccessor.tableExists(metaTable, tableName);
+389  }
+390
+391  @Override
+392  public 
CompletableFutureListTableDescriptor 
listTables(OptionalPattern pattern,
+393  boolean includeSysTables) {
+394return 
this.ListTableDescriptor newMasterCaller()
+395.action((controller, stub) - 
this
+396
.GetTableDescriptorsRequest, GetTableDescriptorsResponse, 
ListTableDescriptor call(
+397  controller, stub,
+398  
RequestConverter.buildGetTableDescriptorsRequest(pattern, includeSysTables),
+399  (s, c, req, done) - 
s.getTableDescriptors(c, req, done),
+400  (resp) - 
ProtobufUtil.toTableDescriptorList(resp)))
+401.call();
+402  }
+403
+404  @Override
+405  public 
CompletableFutureListTableName 
listTableNames(OptionalPattern pattern,
+406  boolean includeSysTables) {
+407return 
this.ListTableName newMasterCaller()
+408.action((controller, stub) - 
this
+409.GetTableNamesRequest, 
GetTableNamesResponse, ListTableName call(controller, stub,
+410  
RequestConverter.buildGetTableNamesRequest(pattern, includeSysTables),
+411  (s, c, req, done) - 
s.getTableNames(c, req, done),
+412  (resp) - 
ProtobufUtil.toTableNameList(resp.getTableNamesList(
+413.call();
 414  }
 415
 416  @Override
-417  public 
CompletableFutureListTableDescriptor 
listTables(OptionalPattern pattern,
-418  boolean includeSysTables) {
-419return 
this.ListTableDescriptor newMasterCaller()
-420.action((controller, stub) - 
this
-421
.GetTableDescriptorsRequest, GetTableDescriptorsResponse, 
ListTableDescriptor call(
-422  controller, stub,
-423  
RequestConverter.buildGetTableDescriptorsRequest(pattern, includeSysTables),
-424  (s, c, req, done) - 
s.getTableDescriptors(c, req, done),
-425  (resp) - 
ProtobufUtil.toTableDescriptorList(resp)))
-426.call();
-427  }
-428
-429  @Override
-430  public 
CompletableFutureListTableName 
listTableNames(OptionalPattern pattern,
-431  boolean includeSysTables) {
-432return 
this.ListTableName newMasterCaller()
-433.action((controller, stub) - 
this
-434.GetTableNamesRequest, 
GetTableNamesResponse, ListTableName call(controller, stub,
-435  
RequestConverter.buildGetTableNamesRequest(pattern, includeSysTables),
-436  (s, c, req, done) - 
s.getTableNames(c, req, done),
-437  (resp) - 
ProtobufUtil.toTableNameList(resp.getTableNamesList(
-438.call();
-439  }
-440
-441  @Override
-442  public 
CompletableFutureTableDescriptor getTableDescriptor(TableName 
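All of these admin calls return a CompletableFuture rather than blocking. A usage sketch, assuming an AsyncAdmin handle that exposes the same listTableNames(Optional<Pattern>, boolean) signature as the RawAsyncHBaseAdmin code above:

  import java.util.List;
  import java.util.Optional;
  import java.util.concurrent.CompletableFuture;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.AsyncAdmin;

  static void printTableNames(AsyncAdmin admin) {
    CompletableFuture<List<TableName>> f =
        admin.listTableNames(Optional.empty(), false);
    f.whenComplete((names, err) -> {
      if (err != null) {
        err.printStackTrace();  // the RPC failed
      } else {
        names.forEach(n -> System.out.println(n.getNameAsString()));
      }
    });
  }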

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5a2158f2/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index 5d138ea..81d256e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
@@ -2387,7 +2387,7 @@
 2379  return true;
 2380}
 2381long modifiedFlushCheckInterval = 
flushCheckInterval;
-2382if (getRegionInfo().isSystemTable() 

+2382if 
(getRegionInfo().getTable().isSystemTable() 
 2383getRegionInfo().getReplicaId() 
== RegionInfo.DEFAULT_REPLICA_ID) {
 2384  modifiedFlushCheckInterval = 
SYSTEM_CACHE_FLUSH_INTERVAL;
 2385}
@@ -7869,7 +7869,7 @@
 7861   */
 7862  public byte[] checkSplit() {
 7863// Can't split META
-7864if 
(this.getRegionInfo().isMetaTable() ||
+7864if 
(this.getRegionInfo().isMetaRegion() ||
 7865
TableName.NAMESPACE_TABLE_NAME.equals(this.getRegionInfo().getTable())) {
 7866  if (shouldForceSplit()) {
 7867LOG.warn("Cannot split meta 
region in HBase 0.20 and above");
@@ -7914,374 +7914,372 @@
 7906  }
 7907
 7908  /** @return the coprocessor host */
-7909  @Override
-7910  public RegionCoprocessorHost 
getCoprocessorHost() {
-7911return coprocessorHost;
-7912  }
-7913
-7914  /** @param coprocessorHost the new 
coprocessor host */
-7915  public void setCoprocessorHost(final 
RegionCoprocessorHost coprocessorHost) {
-7916this.coprocessorHost = 
coprocessorHost;
-7917  }
-7918
-7919  @Override
-7920  public void startRegionOperation() 
throws IOException {
-7921
startRegionOperation(Operation.ANY);
-7922  }
-7923
-7924  @Override
-7925  
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",
-7926justification="Intentional")
-7927  public void 
startRegionOperation(Operation op) throws IOException {
-7928switch (op) {
-7929  case GET:  // read operations
-7930  case SCAN:
-7931checkReadsEnabled();
-7932  case INCREMENT: // write 
operations
-7933  case APPEND:
-7934  case SPLIT_REGION:
-7935  case MERGE_REGION:
-7936  case PUT:
-7937  case DELETE:
-7938  case BATCH_MUTATE:
-7939  case COMPACT_REGION:
-7940  case SNAPSHOT:
-7941// when a region is in 
recovering state, no read, split, merge or snapshot is allowed
-7942if (isRecovering()  
(this.disallowWritesInRecovering ||
-7943  (op != Operation.PUT 
 op != Operation.DELETE  op != Operation.BATCH_MUTATE))) 
{
-7944  throw new 
RegionInRecoveryException(getRegionInfo().getRegionNameAsString() +
-7945" is recovering; cannot take 
reads");
-7946}
-7947break;
-7948  default:
-7949break;
-7950}
-7951if (op == Operation.MERGE_REGION || 
op == Operation.SPLIT_REGION
-7952|| op == 
Operation.COMPACT_REGION) {
-7953  // split, merge or compact region 
doesn't need to check the closing/closed state or lock the
-7954  // region
-7955  return;
-7956}
-7957if (this.closing.get()) {
-7958  throw new 
NotServingRegionException(getRegionInfo().getRegionNameAsString() + " is 
closing");
-7959}
-7960lock(lock.readLock());
-7961if (this.closed.get()) {
-7962  lock.readLock().unlock();
-7963  throw new 
NotServingRegionException(getRegionInfo().getRegionNameAsString() + " is 
closed");
-7964}
-7965// The unit for snapshot is a 
region. So, all stores for this region must be
-7966// prepared for snapshot operation 
before proceeding.
-7967if (op == Operation.SNAPSHOT) {
-7968  
stores.values().forEach(HStore::preSnapshotOperation);
-7969}
-7970try {
-7971  if (coprocessorHost != null) {
-7972
coprocessorHost.postStartRegionOperation(op);
-7973  }
-7974} catch (Exception e) {
-7975  lock.readLock().unlock();
-7976  throw new IOException(e);
-7977}
-7978  }
-7979
-7980  @Override
-7981  public void closeRegionOperation() 
throws IOException {
-7982
closeRegionOperation(Operation.ANY);
-7983  }
-7984
-7985  @Override
-7986  public void 
closeRegionOperation(Operation operation) throws IOException {
-7987if (operation == Operation.SNAPSHOT) 
{
-7988  
stores.values().forEach(HStore::postSnapshotOperation);
-7989}
-7990lock.readLock().unlock();
-7991if (coprocessorHost != null) {
-7992  
coprocessorHost.postCloseRegionOperation(operation);
-7993}
-7994  }
-7995
-7996  /**
-7997   * This method needs to be called 
before any public call that reads or
-7998   * modifies stores in bulk. It has to 
be called just before a try.
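The truncated javadoc above states the contract for these methods: acquire the region operation lock just before a try block and release it in the finally. Condensed to its essential shape (Operation is the enum used in the hunk; the body is illustrative):

  static void readUnderRegionLock(HRegion region) throws IOException {
    region.startRegionOperation(Operation.GET); // acquire just before the try
    try {
      // ... bulk reads against the region's stores ...
    } finally {
      region.closeRegionOperation(Operation.GET); // always release
    }
  }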

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7816cbde/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
index f8309f2..7f75adf 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Reactor Dependency Convergence
 
@@ -123,10 +123,10 @@
 305
 
 Number of unique artifacts (NOA):
-331
+329
 
 Number of version-conflicting artifacts (NOC):
-17
+16
 
 Number of SNAPSHOT artifacts (NOS):
 0
@@ -191,75 +191,20 @@
 11.0.2
 
 
-org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile|\-(com.google.guava:guava:jar:11.0.2:compile
 - omitted for duplicate)\-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT:compile\-com.google.guava:guava:jar:11.0.2:compile
-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT+-org.apache.hadoop:hadoop-common:jar:2.7.1:compile|\-(com.google.guava:guava:jar:11.0.2:compile - omitted for 
duplicate)\-com.google.guava:guava:jar:11.0.2:compile
-org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT\-org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-com.google.guava:guava:jar:11.0.2:compile
-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-com.google.guava:guava:jar:11.0.2:compile
-org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-com.google.guava:guava:jar:11.0.2:compile
-org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-com.google.guava:guava:jar:11.0.2:compile
-org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT+-org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.7.1:compile|\-org.apache.hadoop:hadoop-yarn-common:jar:2.7.1:compile|\-(com.google.guava:guava:jar:11.0.2:compile
 - omitted for duplicate)\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-com.google.guava:guava:jar:11.0.2:compile
-org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT\-org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile\-org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-com.google.guava:guava:jar:11.0.2:compile
-org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-com.google.guava:guava:jar:11.0.2:compile
-org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile|\-org.apache.hadoop:hadoop-hdfs:jar:2.7.1:compile|\-(com.google.guava:guava:jar:11.0.2:compile
 - omitted for duplicate)+-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT:compile|\-com.google.guava:guava:jar:11.0.2:compile\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-(com.google.guava:guava:jar:11.0.2:compile - 
omitted for duplicate)
-org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT+-org.apache.hadoop:hadoop-common:jar:2.7.1:compile|\-com.google.guava:guava:jar:11.0.2:compile+-org.apache.hadoop:hadoop-hdfs:jar:2.7.1:compile|\-(com.google.guava:guava:jar:11.0.2:compile - omitted for 
duplicate)\-org.apache.hadoop:hadoop-minicluster:jar:2.7.1:test+-org.apache.hadoop:hadoop-common:test-jar:tests:2.7.1:test|\-(com.google.guava:guava:jar:11.0.2:test
 - omitted for duplicate)\-org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.7.1:test+-org.apache.hadoop:hadoop-yarn-server-nodemanager:jar:2.7.1:test|\-(com.google.guava:guava:jar:11.0.2:test
 - omitted for duplicate)+-org
 .apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.1:test|+-(com.google.guava:guava:jar:11.0.2:test
 - omitted for duplicate)|+-org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice:jar:2.7.1:test||\-(com.google.guava:guava:jar:11.0.2:test
 - omitted for duplicate)|\-org.apache.hadoop:hadoop-yarn-server-web-proxy:jar:2.7.1:test|\-(com.google.guava:guava:jar:11.0.2:test
 - omitted for duplicate)\-(com.google.guava:guava:jar:11.0.2:test
 - omitted for duplicate)
-org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT\-org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-com.google.guava:guava:jar:11.0.2:compile

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/c0571676/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatch.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatch.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatch.html
index 41e0c24..eac35d3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatch.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatch.html
@@ -110,8004 +110,8178 @@
 102import 
org.apache.hadoop.hbase.UnknownScannerException;
 103import 
org.apache.hadoop.hbase.client.Append;
 104import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-105import 
org.apache.hadoop.hbase.client.Delete;
-106import 
org.apache.hadoop.hbase.client.Durability;
-107import 
org.apache.hadoop.hbase.client.Get;
-108import 
org.apache.hadoop.hbase.client.Increment;
-109import 
org.apache.hadoop.hbase.client.IsolationLevel;
-110import 
org.apache.hadoop.hbase.client.Mutation;
-111import 
org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
-112import 
org.apache.hadoop.hbase.client.Put;
-113import 
org.apache.hadoop.hbase.client.RegionInfo;
-114import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-115import 
org.apache.hadoop.hbase.client.Result;
-116import 
org.apache.hadoop.hbase.client.RowMutations;
-117import 
org.apache.hadoop.hbase.client.Scan;
-118import 
org.apache.hadoop.hbase.client.TableDescriptor;
-119import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-120import 
org.apache.hadoop.hbase.conf.ConfigurationManager;
-121import 
org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
-122import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-123import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-124import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-125import 
org.apache.hadoop.hbase.exceptions.RegionInRecoveryException;
-126import 
org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-127import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-128import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-129import 
org.apache.hadoop.hbase.filter.FilterWrapper;
-130import 
org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-131import 
org.apache.hadoop.hbase.io.HFileLink;
-132import 
org.apache.hadoop.hbase.io.HeapSize;
-133import 
org.apache.hadoop.hbase.io.TimeRange;
-134import 
org.apache.hadoop.hbase.io.hfile.HFile;
-135import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
-136import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-137import 
org.apache.hadoop.hbase.ipc.RpcCall;
-138import 
org.apache.hadoop.hbase.ipc.RpcServer;
-139import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-140import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-141import 
org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-142import 
org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
-143import 
org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
-144import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
-145import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-146import 
org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;
-147import 
org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
-148import 
org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
-149import 
org.apache.hadoop.hbase.regionserver.wal.WALUtil;
-150import 
org.apache.hadoop.hbase.security.User;
-151import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-152import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
-153import 
org.apache.hadoop.hbase.util.Bytes;
-154import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-155import 
org.apache.hadoop.hbase.util.ClassSize;
-156import 
org.apache.hadoop.hbase.util.CollectionUtils;
-157import 
org.apache.hadoop.hbase.util.CompressionTest;
-158import 
org.apache.hadoop.hbase.util.EncryptionTest;
-159import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-160import 
org.apache.hadoop.hbase.util.FSUtils;
-161import 
org.apache.hadoop.hbase.util.HashedBytes;
-162import 
org.apache.hadoop.hbase.util.Pair;
-163import 
org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
-164import 
org.apache.hadoop.hbase.util.Threads;
-165import org.apache.hadoop.hbase.wal.WAL;
-166import 
org.apache.hadoop.hbase.wal.WALEdit;
-167import 
org.apache.hadoop.hbase.wal.WALFactory;
-168import 
org.apache.hadoop.hbase.wal.WALKey;
-169import 
org.apache.hadoop.hbase.wal.WALSplitter;
-170import 
org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;
-171import 
org.apache.hadoop.io.MultipleIOException;
-172import 
org.apache.hadoop.util.StringUtils;
-173import 

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9e2ced84/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
index caa765b..9aad245 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
@@ -577,956 +577,954 @@
 569   * @param store The store where 
compaction is being requested
 570   * @param candidates The currently 
available store files
 571   * @param tracker used to track the 
life cycle of a compaction
-572   * @param request the compaction 
request
-573   * @param user the user
-574   * @return If {@code true}, skip the 
normal selection process and use the current list
-575   * @throws IOException
-576   */
-577  public boolean 
preCompactSelection(final HStore store, final ListHStoreFile 
candidates,
-578  final CompactionLifeCycleTracker 
tracker, final CompactionRequest request,
-579  final User user) throws IOException 
{
-580return 
execOperation(coprocEnvironments.isEmpty() ? null : new 
RegionObserverOperation(user) {
-581  @Override
-582  public void call(RegionObserver 
observer) throws IOException {
-583
observer.preCompactSelection(this, store, candidates, tracker, request);
-584  }
-585});
-586  }
-587
-588  /**
-589   * Called after the {@link HStoreFile}s 
to be compacted have been selected from the available
-590   * candidates.
-591   * @param store The store where 
compaction is being requested
-592   * @param selected The store files 
selected to compact
-593   * @param tracker used to track the 
life cycle of a compaction
-594   * @param request the compaction 
request
-595   * @param user the user
-596   */
-597  public void postCompactSelection(final 
HStore store, final ImmutableListHStoreFile selected,
-598  final CompactionLifeCycleTracker 
tracker, final CompactionRequest request,
-599  final User user) throws IOException 
{
-600
execOperation(coprocEnvironments.isEmpty() ? null : new 
RegionObserverOperation(user) {
-601  @Override
-602  public void call(RegionObserver 
observer) throws IOException {
-603
observer.postCompactSelection(this, store, selected, tracker, request);
-604  }
-605});
-606  }
-607
-608  /**
-609   * Called prior to rewriting the store 
files selected for compaction
-610   * @param store the store being 
compacted
-611   * @param scanner the scanner used to 
read store data during compaction
-612   * @param scanType type of Scan
-613   * @param tracker used to track the 
life cycle of a compaction
-614   * @param request the compaction 
request
-615   * @param user the user
-616   * @throws IOException
-617   */
-618  public InternalScanner preCompact(final 
HStore store, final InternalScanner scanner,
-619  final ScanType scanType, final 
CompactionLifeCycleTracker tracker,
-620  final CompactionRequest request, 
final User user) throws IOException {
-621return execOperationWithResult(false, 
scanner, coprocEnvironments.isEmpty() ? null :
-622new 
ObserverOperationWithResultRegionObserver, InternalScanner(
-623regionObserverGetter, user) 
{
-624  @Override
-625  public InternalScanner 
call(RegionObserver observer) throws IOException {
-626return 
observer.preCompact(this, store, getResult(), scanType, tracker, request);
-627  }
-628});
-629  }
-630
-631  /**
-632   * Called after the store compaction 
has completed.
-633   * @param store the store being 
compacted
-634   * @param resultFile the new store file 
written during compaction
-635   * @param tracker used to track the 
life cycle of a compaction
-636   * @param request the compaction 
request
-637   * @param user the user
-638   * @throws IOException
-639   */
-640  public void postCompact(final HStore 
store, final HStoreFile resultFile,
-641  final CompactionLifeCycleTracker 
tracker, final CompactionRequest request, final User user)
-642  throws IOException {
-643
execOperation(coprocEnvironments.isEmpty() ? null : new 
RegionObserverOperation(user) {
-644  @Override
-645  public void call(RegionObserver 
observer) throws IOException {
-646observer.postCompact(this, store, 
resultFile, tracker, request);
-647  }
-648});
-649  }
-650
-651  /**
-652   * Invoked before a memstore flush
-653   * @throws IOException
-654   */
-655  public InternalScanner preFlush(HStore 
store, final InternalScanner scanner)
-656  throws IOException {
-657return 
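These host methods fan out to RegionObserver callbacks. A hedged observer-side sketch for the postCompact hook, with the parameter list inferred from the host-side call above; the exact observer signature in this snapshot may differ:

  import java.io.IOException;
  import org.apache.hadoop.hbase.coprocessor.ObserverContext;
  import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
  import org.apache.hadoop.hbase.coprocessor.RegionObserver;
  import org.apache.hadoop.hbase.regionserver.Store;
  import org.apache.hadoop.hbase.regionserver.StoreFile;
  import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
  import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;

  public class CompactionAuditor implements RegionObserver {
    @Override
    public void postCompact(ObserverContext<RegionCoprocessorEnvironment> ctx,
        Store store, StoreFile resultFile, CompactionLifeCycleTracker tracker,
        CompactionRequest request) throws IOException {
      // Inspect the freshly written store file once compaction completes
      // (body illustrative).
    }
  }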

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index 03692cd..a9dd1e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -2120,7 +2120,7 @@
 2112
 2113  @Override
 2114  public void stop(final String msg) {
-2115stop(msg, false, 
RpcServer.getRequestUser());
+2115stop(msg, false, 
RpcServer.getRequestUser().orElse(null));
 2116  }
 2117
 2118  /**
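This hunk captures an API change: RpcServer.getRequestUser() now returns an Optional, so call sites that still expect a nullable User unwrap it with orElse(null). The same pattern in isolation:

  Optional<User> requestUser = RpcServer.getRequestUser(); // now Optional-valued
  User user = requestUser.orElse(null);   // legacy callers still accept null
  stop(msg, false, user);                 // as in the rewritten stop(String)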

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index 03692cd..a9dd1e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
@@ -2120,7 +2120,7 @@
 2112
 2113  @Override
 2114  public void stop(final String msg) {
-2115stop(msg, false, 
RpcServer.getRequestUser());
+2115stop(msg, false, 
RpcServer.getRequestUser().orElse(null));
 2116  }
 2117
 2118  /**

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index 03692cd..a9dd1e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -2120,7 +2120,7 @@
 2112
 2113  @Override
 2114  public void stop(final String msg) {
-2115stop(msg, false, 
RpcServer.getRequestUser());
+2115stop(msg, false, 
RpcServer.getRequestUser().orElse(null));
 2116  }
 2117
 2118  /**

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
index 03692cd..a9dd1e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
@@ -2120,7 +2120,7 @@
 2112
 2113  @Override
 2114  public void stop(final String msg) {
-2115stop(msg, false, 
RpcServer.getRequestUser());
+2115stop(msg, false, 
RpcServer.getRequestUser().orElse(null));
 2116  }
 2117
 2118  /**

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index 03692cd..a9dd1e3 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -2120,7 +2120,7 @@
 2112
 2113  @Override
 2114  public void stop(final String msg) {
-2115stop(msg, false, 
RpcServer.getRequestUser());
+2115stop(msg, false, 
RpcServer.getRequestUser().orElse(null));
 2116  }
 2117
 2118  /**

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b838bdf0/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
index 9ec6b56..8802652 100644
--- 

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/387c1112/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/Import.KeyValueReducer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/Import.KeyValueReducer.html
 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/Import.KeyValueReducer.html
deleted file mode 100644
index 849e123..000
--- 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/Import.KeyValueReducer.html
+++ /dev/null
@@ -1,125 +0,0 @@
-Uses of Class org.apache.hadoop.hbase.mapreduce.Import.KeyValueReducer
-No usage of org.apache.hadoop.hbase.mapreduce.Import.KeyValueReducer
-

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/387c1112/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/Import.KeyValueSortImporter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/Import.KeyValueSortImporter.html
 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/Import.KeyValueSortImporter.html
deleted file mode 100644
index 5eec966..000
--- 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/Import.KeyValueSortImporter.html
+++ /dev/null
@@ -1,125 +0,0 @@
-Uses of 
Classorg.apache.hadoop.hbase.mapreduce.Import.KeyValueSortImporter
-
-No usage of 
org.apache.hadoop.hbase.mapreduce.Import.KeyValueSortImporter
-
-
-
-
-Skip navigation links
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev
-Next
-
-
-Frames
-NoFrames
-
-
-AllClasses
-
-
-
-
-
-
-
-
-
-Copyright  20072017 https://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
-
-

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/387c1112/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/Import.KeyValueWritableComparable.KeyValueWritableComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/Import.KeyValueWritableComparable.KeyValueWritableComparator.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/Import.KeyValueWritableComparable.KeyValueWritableComparator.html
deleted file mode 100644
index 9b1e89a..0000000
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/Import.KeyValueWritableComparable.KeyValueWritableComparator.html
+++ /dev/null
@@ -1,125 +0,0 @@
-[deleted page body: standard generated Javadoc chrome; the rest of this message is truncated in the archive]

[12/51] [partial] hbase-site git commit: Published site at .

2017-10-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3332caca/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
index ce2e1a7..8596033 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
@@ -115,10 +115,10 @@
 107    this.flushHandlers = new FlushHandler[handlerCount];
 108    LOG.info("globalMemStoreLimit="
 109        + TraditionalBinaryPrefix
-110            .long2String(this.server.getRegionServerAccounting().getGlobalMemstoreLimit(), "", 1)
+110            .long2String(this.server.getRegionServerAccounting().getGlobalMemStoreLimit(), "", 1)
 111        + ", globalMemStoreLimitLowMark="
 112        + TraditionalBinaryPrefix.long2String(
-113            this.server.getRegionServerAccounting().getGlobalMemstoreLimitLowMark(), "", 1)
+113            this.server.getRegionServerAccounting().getGlobalMemStoreLimitLowMark(), "", 1)
 114        + ", Offheap="
 115        + (this.server.getRegionServerAccounting().isOffheap()));
 116  }
@@ -144,12 +144,12 @@
 136    while (!flushedOne) {
 137      // Find the biggest region that doesn't have too many storefiles
 138      // (might be null!)
-139      Region bestFlushableRegion = getBiggestMemstoreRegion(regionsBySize, excludedRegions, true);
+139      Region bestFlushableRegion = getBiggestMemStoreRegion(regionsBySize, excludedRegions, true);
 140      // Find the biggest region, total, even if it might have too many flushes.
-141      Region bestAnyRegion = getBiggestMemstoreRegion(
+141      Region bestAnyRegion = getBiggestMemStoreRegion(
 142          regionsBySize, excludedRegions, false);
 143      // Find the biggest region that is a secondary region
-144      Region bestRegionReplica = getBiggestMemstoreOfRegionReplica(regionsBySize,
+144      Region bestRegionReplica = getBiggestMemStoreOfRegionReplica(regionsBySize,
 145        excludedRegions);
 146
 147      if (bestAnyRegion == null && bestRegionReplica == null) {
@@ -159,7 +159,7 @@
 151
 152      Region regionToFlush;
 153      if (bestFlushableRegion != null &&
-154          bestAnyRegion.getMemstoreSize() > 2 * bestFlushableRegion.getMemstoreSize()) {
+154          bestAnyRegion.getMemStoreSize() > 2 * bestFlushableRegion.getMemStoreSize()) {
 155        // Even if it's not supposed to be flushed, pick a region if it's more than twice
 156        // as big as the best flushable one - otherwise when we're under pressure we make
 157        // lots of little flushes and cause lots of compactions, etc, which just makes
@@ -168,9 +168,9 @@
 160          LOG.debug("Under global heap pressure: " + "Region "
 161              + bestAnyRegion.getRegionInfo().getRegionNameAsString()
 162              + " has too many " + "store files, but is "
-163              + TraditionalBinaryPrefix.long2String(bestAnyRegion.getMemstoreSize(), "", 1)
+163              + TraditionalBinaryPrefix.long2String(bestAnyRegion.getMemStoreSize(), "", 1)
 164              + " vs best flushable region's "
-165              + TraditionalBinaryPrefix.long2String(bestFlushableRegion.getMemstoreSize(), "", 1)
+165              + TraditionalBinaryPrefix.long2String(bestFlushableRegion.getMemStoreSize(), "", 1)
 166              + ". Choosing the bigger.");
 167        }
 168        regionToFlush = bestAnyRegion;
@@ -183,20 +183,20 @@
 175      }
 176
 177      Preconditions.checkState(
-178        (regionToFlush != null && regionToFlush.getMemstoreSize() > 0) ||
-179        (bestRegionReplica != null && bestRegionReplica.getMemstoreSize() > 0));
+178        (regionToFlush != null && regionToFlush.getMemStoreSize() > 0) ||
+179        (bestRegionReplica != null && bestRegionReplica.getMemStoreSize() > 0));
 180
 181      if (regionToFlush == null ||
 182          (bestRegionReplica != null &&
 183           ServerRegionReplicaUtil.isRegionReplicaStoreFileRefreshEnabled(conf) &&
-184           (bestRegionReplica.getMemstoreSize()
-185               > secondaryMultiplier * regionToFlush.getMemstoreSize()))) {
+184           (bestRegionReplica.getMemStoreSize()
+185               > secondaryMultiplier * regionToFlush.getMemStoreSize()))) {
 186        LOG.info("Refreshing storefiles of region " + bestRegionReplica
 187            + " due to global heap pressure. Total memstore datasize="
 188            + StringUtils
-189                .humanReadableInt(server.getRegionServerAccounting().getGlobalMemstoreDataSize())
+189                .humanReadableInt(server.getRegionServerAccounting().getGlobalMemStoreDataSize())
 190            + " memstore heap

[12/51] [partial] hbase-site git commit: Published site at .

2017-09-30 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d41f56fe/devapidocs/src-html/org/apache/hadoop/hbase/util/RegionSplitter.SplitAlgorithm.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/RegionSplitter.SplitAlgorithm.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/RegionSplitter.SplitAlgorithm.html
index c78b462..84a35ea 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/RegionSplitter.SplitAlgorithm.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/RegionSplitter.SplitAlgorithm.html
@@ -57,1071 +57,1142 @@
 049import org.apache.hadoop.hbase.ClusterStatus;
 050import org.apache.hadoop.hbase.ClusterStatus.Option;
 051import org.apache.hadoop.hbase.HBaseConfiguration;
-052import org.apache.hadoop.hbase.HRegionInfo;
-053import org.apache.hadoop.hbase.HRegionLocation;
-054import org.apache.hadoop.hbase.MetaTableAccessor;
-055import org.apache.hadoop.hbase.ServerName;
-056import org.apache.hadoop.hbase.TableName;
-057import org.apache.yetus.audience.InterfaceAudience;
-058import org.apache.hadoop.hbase.client.Admin;
-059import org.apache.hadoop.hbase.client.ClusterConnection;
-060import org.apache.hadoop.hbase.client.Connection;
-061import org.apache.hadoop.hbase.client.ConnectionFactory;
-062import org.apache.hadoop.hbase.client.NoServerForRegionException;
-063import org.apache.hadoop.hbase.client.RegionLocator;
-064import org.apache.hadoop.hbase.client.Table;
-065import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-066
-067import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-068import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-069import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-070import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-071import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-072import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-073
-074/**
-075 * The {@link RegionSplitter} class provides several utilities to help in the
-076 * administration lifecycle for developers who choose to manually split regions
-077 * instead of having HBase handle that automatically. The most useful utilities
-078 * are:
-079 * <p>
-080 * <ul>
-081 * <li>Create a table with a specified number of pre-split regions
-082 * <li>Execute a rolling split of all regions on an existing table
-083 * </ul>
-084 * <p>
-085 * Both operations can be safely done on a live server.
-086 * <p>
-087 * <b>Question:</b> How do I turn off automatic splitting? <br>
-088 * <b>Answer:</b> Automatic splitting is determined by the configuration value
-089 * <i>HConstants.HREGION_MAX_FILESIZE</i>. It is not recommended that you set this
-090 * to Long.MAX_VALUE in case you forget about manual splits. A suggested setting
-091 * is 100GB, which would result in &gt; 1hr major compactions if reached.
-092 * <p>
-093 * <b>Question:</b> Why did the original authors decide to manually split? <br>
-094 * <b>Answer:</b> Specific workload characteristics of our use case allowed us
-095 * to benefit from a manual split system.
-096 * <p>
-097 * <ul>
-098 * <li>Data (~1k) that would grow instead of being replaced
-099 * <li>Data growth was roughly uniform across all regions
-100 * <li>OLTP workload. Data loss is a big deal.
-101 * </ul>
-102 * <p>
-103 * <b>Question:</b> Why is manual splitting good for this workload? <br>
-104 * <b>Answer:</b> Although automated splitting is not a bad option, there are
-105 * benefits to manual splitting.
-106 * <p>
-107 * <ul>
-108 * <li>With growing amounts of data, splits will continually be needed. Since
-109 * you always know exactly what regions you have, long-term debugging and
-110 * profiling is much easier with manual splits. It is hard to trace the logs to
-111 * understand region level problems if it keeps splitting and getting renamed.
-112 * <li>Data offlining bugs + unknown number of split regions == oh crap! If a
-113 * WAL or StoreFile was mistakenly unprocessed by HBase due to a weird bug and
-114 * you notice it a day or so later, you can be assured that the regions
-115 * specified in these files are the same as the current regions and you have
-116 * less headaches trying to restore/replay your data.
-117 * <li>You can finely tune your compaction algorithm. With roughly uniform data
-118 * growth, it's easy to cause split / compaction storms as the regions all
-119 * roughly hit the same data size at the same time. With manual splits, you can
-120 * let staggered, time-based major compactions spread out your network IO load.
-121 * </ul>
-122 * <p>
-123 * <b>Question:</b> What's the optimal number of pre-split regions to create? <br>
-124 * <b>Answer:</b> Mileage will vary depending upon your application.
-125 * <p>
-126 * The short answer for our application is that we started with 10 pre-split
-127 * regions / server and watched our data growth over time. It's better to err on
-128
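
To make the "pre-split regions" utility described above concrete, here is a minimal sketch of creating a table pre-split into 16 regions with HexStringSplit. The table name "demo" and family "f" are illustrative, and the builder method names follow the HBase 2.x client API, which may differ slightly in older alpha builds:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.RegionSplitter;

public class PreSplitDemo {
  public static void main(String[] args) throws Exception {
    // HexStringSplit assumes row keys that spread uniformly over a hex keyspace.
    byte[][] splits = new RegionSplitter.HexStringSplit().split(16);
    try (Connection conn = ConnectionFactory.createConnection();
         Admin admin = conn.getAdmin()) {
      admin.createTable(
          TableDescriptorBuilder.newBuilder(TableName.valueOf("demo"))
              .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f"))
              .build(),
          splits);
    }
    // Roughly equivalent CLI form, per the RegionSplitter javadoc:
    //   hbase org.apache.hadoop.hbase.util.RegionSplitter demo HexStringSplit -c 16 -f f
  }
}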

[12/51] [partial] hbase-site git commit: Published site at .

2017-09-29 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aea328be/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSCell.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSCell.html b/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSCell.html
index 26125e3..eb6a00d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSCell.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/CellUtil.FirstOnRowColTSCell.html
@@ -28,3193 +28,3241 @@
 020
 021import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 022import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-023
-024import java.io.DataOutputStream;
-025import java.io.IOException;
-026import java.io.OutputStream;
-027import java.math.BigDecimal;
-028import java.nio.ByteBuffer;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.Iterator;
-032import java.util.List;
-033import java.util.Map.Entry;
-034import java.util.NavigableMap;
-035
-036import org.apache.hadoop.hbase.KeyValue.Type;
-037import org.apache.yetus.audience.InterfaceAudience;
-038import org.apache.yetus.audience.InterfaceAudience.Private;
-039import org.apache.hadoop.hbase.io.HeapSize;
-040import org.apache.hadoop.hbase.io.TagCompressionContext;
-041import org.apache.hadoop.hbase.io.util.Dictionary;
-042import org.apache.hadoop.hbase.io.util.StreamUtils;
-043import org.apache.hadoop.hbase.util.ByteBufferUtils;
-044import org.apache.hadoop.hbase.util.ByteRange;
-045import org.apache.hadoop.hbase.util.Bytes;
-046import org.apache.hadoop.hbase.util.ClassSize;
-047
-048/**
-049 * Utility methods helpful slinging {@link Cell} instances.
-050 * Some methods below are for internal use only and are marked InterfaceAudience.Private at the
-051 * method level.
-052 */
-053@InterfaceAudience.Public
-054public final class CellUtil {
-055
-056  /**
-057   * Private constructor to keep this class from being instantiated.
-058   */
-059  private CellUtil(){}
-060
-061  /*************** ByteRange ***************/
-062
-063  public static ByteRange fillRowRange(Cell cell, ByteRange range) {
-064    return range.set(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
-065  }
-066
-067  public static ByteRange fillFamilyRange(Cell cell, ByteRange range) {
-068    return range.set(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
-069  }
-070
-071  public static ByteRange fillQualifierRange(Cell cell, ByteRange range) {
-072    return range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-073      cell.getQualifierLength());
-074  }
-075
-076  public static ByteRange fillValueRange(Cell cell, ByteRange range) {
-077    return range.set(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
-078  }
-079
-080  public static ByteRange fillTagRange(Cell cell, ByteRange range) {
-081    return range.set(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
-082  }
-083
-084  /*************** get individual arrays for tests ***************/
-085
-086  public static byte[] cloneRow(Cell cell){
-087    byte[] output = new byte[cell.getRowLength()];
-088    copyRowTo(cell, output, 0);
-089    return output;
-090  }
-091
-092  public static byte[] cloneFamily(Cell cell){
-093    byte[] output = new byte[cell.getFamilyLength()];
-094    copyFamilyTo(cell, output, 0);
-095    return output;
-096  }
-097
-098  public static byte[] cloneQualifier(Cell cell){
-099    byte[] output = new byte[cell.getQualifierLength()];
-100    copyQualifierTo(cell, output, 0);
-101    return output;
-102  }
-103
-104  public static byte[] cloneValue(Cell cell){
-105    byte[] output = new byte[cell.getValueLength()];
-106    copyValueTo(cell, output, 0);
-107    return output;
-108  }
-109
-110  public static byte[] cloneTags(Cell cell) {
-111    byte[] output = new byte[cell.getTagsLength()];
-112    copyTagTo(cell, output, 0);
-113    return output;
-114  }
-115
-116  /**
-117   * Returns tag value in a new byte array. If server-side, use
-118   * {@link Tag#getValueArray()} with appropriate {@link Tag#getValueOffset()} and
-119   * {@link Tag#getValueLength()} instead to save on allocations.
-120   * @param cell
-121   * @return tag value in a new byte array.
-122   */
-123  public static byte[] getTagArray(Cell cell){
-124    byte[] output = new byte[cell.getTagsLength()];
-125    copyTagTo(cell, output, 0);
-126    return output;
-127  }
-128
-129
-130  /*************** copyTo ***************/
+023import static org.apache.hadoop.hbase.KeyValue.COLUMN_FAMILY_DELIMITER;
+024import static org.apache.hadoop.hbase.KeyValue.getDelimiter;
+025import static org.apache.hadoop.hbase.KeyValue.COLUMN_FAMILY_DELIM_ARRAY;
+026
+027import java.io.DataOutputStream;
+028import
[12/51] [partial] hbase-site git commit: Published site at .

2017-09-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/67deb422/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index c0c201e..85a8662 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -475,7 +475,7 @@

-static NavigableMap<HRegionInfo,ServerName>
+static NavigableMap<RegionInfo,ServerName>
 MetaTableAccessor.allTableRegions(Connection connection, TableName tableName)
 Deprecated.
@@ -563,7 +563,7 @@
 TableDescriptors.get(TableName tableName)

-static HRegionInfo
+static RegionInfo
 MetaTableAccessor.getClosestRegionInfo(Connection connection, TableName tableName, byte[] row)
@@ -603,14 +603,14 @@
 ExecutorService service)

-static List<HRegionInfo>
+static List<RegionInfo>
 MetaTableAccessor.getTableRegions(Connection connection, TableName tableName)
 Gets all of the regions of the specified table.

-static List<HRegionInfo>
+static List<RegionInfo>
 MetaTableAccessor.getTableRegions(Connection connection, TableName tableName, boolean excludeOfflinedSplitParents)
@@ -618,14 +618,14 @@

-static List<Pair<HRegionInfo,ServerName>>
+static List<Pair<RegionInfo,ServerName>>
 MetaTableAccessor.getTableRegionsAndLocations(Connection connection, TableName tableName)
 Do not use this method to get meta table regions, use methods in MetaTableLocator instead.

-static List<Pair<HRegionInfo,ServerName>>
+static List<Pair<RegionInfo,ServerName>>
 MetaTableAccessor.getTableRegionsAndLocations(Connection connection, TableName tableName, boolean excludeOfflinedSplitParents)
@@ -656,7 +656,7 @@

 (package private) static boolean
-MetaTableAccessor.isInsideTable(HRegionInfo current, TableName tableName)
+MetaTableAccessor.isInsideTable(RegionInfo current, TableName tableName)
@@ -729,7 +729,7 @@

-private static CompletableFuture<List<Pair<HRegionInfo,ServerName>>>
+private static CompletableFuture<List<Pair<RegionInfo,ServerName>>>
 AsyncMetaTableAccessor.getTableRegionsAndLocations(RawAsyncTable metaTable, Optional<TableName> tableName, boolean excludeOfflinedSplitParents)
@@ -2071,81 +2071,85 @@

 private TableName
-RegionInfoBuilder.MutableRegionInfo.tableName
+RegionInfoBuilder.tableName

 private TableName
-RawAsyncTableImpl.tableName
+RegionInfoBuilder.MutableRegionInfo.tableName

 private TableName
-RegionCoprocessorRpcChannelImpl.tableName
+RawAsyncTableImpl.tableName

 private TableName
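
The pattern running through this hunk is the HRegionInfo-to-RegionInfo migration: MetaTableAccessor signatures now traffic in the RegionInfo interface rather than the deprecated HRegionInfo class. A minimal caller-side sketch of the new shape (table "t1" and the ListRegions class are illustrative):

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionInfo;

public class ListRegions {
  public static void main(String[] args) throws IOException {
    try (Connection conn = ConnectionFactory.createConnection()) {
      // Was List<HRegionInfo> before the migration shown above.
      List<RegionInfo> regions =
          MetaTableAccessor.getTableRegions(conn, TableName.valueOf("t1"));
      for (RegionInfo region : regions) {
        System.out.println(region.getRegionNameAsString());
      }
    }
  }
}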

[12/51] [partial] hbase-site git commit: Published site at .

2017-09-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e4e1542/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
index be637e0..a5ef9d6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
@@ -77,2686 +77,2678 @@
 069import org.apache.hadoop.hbase.client.RegionInfo;
 070import org.apache.hadoop.hbase.client.Result;
 071import org.apache.hadoop.hbase.client.Scan;
-072import org.apache.hadoop.hbase.client.Table;
-073import org.apache.hadoop.hbase.client.TableDescriptor;
-074import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
-075import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-076import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
-077import org.apache.hadoop.hbase.coprocessor.EndpointObserver;
-078import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
-079import org.apache.hadoop.hbase.coprocessor.MasterObserver;
-080import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-081import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-082import org.apache.hadoop.hbase.coprocessor.RegionObserver;
-083import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
-084import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
-085import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-086import org.apache.hadoop.hbase.filter.Filter;
-087import org.apache.hadoop.hbase.filter.FilterList;
-088import org.apache.hadoop.hbase.io.hfile.HFile;
-089import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-090import org.apache.hadoop.hbase.ipc.RpcServer;
-091import org.apache.hadoop.hbase.master.MasterServices;
-092import org.apache.hadoop.hbase.master.locking.LockProcedure;
-093import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-094import org.apache.hadoop.hbase.net.Address;
-095import org.apache.hadoop.hbase.procedure2.LockType;
-096import org.apache.hadoop.hbase.procedure2.Procedure;
-097import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-098import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-099import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
-100import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
-101import org.apache.hadoop.hbase.quotas.GlobalQuotaSettings;
-102import org.apache.hadoop.hbase.regionserver.InternalScanner;
-103import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
-104import org.apache.hadoop.hbase.regionserver.Region;
-105import org.apache.hadoop.hbase.regionserver.RegionScanner;
-106import org.apache.hadoop.hbase.regionserver.ScanType;
-107import org.apache.hadoop.hbase.regionserver.ScannerContext;
-108import org.apache.hadoop.hbase.regionserver.Store;
-109import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-110import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
-111import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-112import org.apache.hadoop.hbase.security.AccessDeniedException;
-113import org.apache.hadoop.hbase.security.Superusers;
-114import org.apache.hadoop.hbase.security.User;
-115import org.apache.hadoop.hbase.security.UserProvider;
-116import org.apache.hadoop.hbase.security.access.Permission.Action;
-117import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-118import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
-119import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-120import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-121import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
-122import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-123import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-124import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;
+072import org.apache.hadoop.hbase.client.SnapshotDescription;
+073import org.apache.hadoop.hbase.client.Table;
+074import org.apache.hadoop.hbase.client.TableDescriptor;
+075import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
+076import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
+077import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
+078import org.apache.hadoop.hbase.coprocessor.EndpointObserver;
+079import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
+080import org.apache.hadoop.hbase.coprocessor.MasterObserver;
+081import