hbase git commit: HBASE-19240 more error-prone results

2017-11-12 Thread mdrob
Repository: hbase
Updated Branches:
  refs/heads/branch-2 bc8048cf6 -> cd681f26b


HBASE-19240 more error-prone results


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cd681f26
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cd681f26
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cd681f26

Branch: refs/heads/branch-2
Commit: cd681f26bc27c2f5d6386060d456e233568044b4
Parents: bc8048c
Author: Mike Drob 
Authored: Fri Nov 10 16:32:25 2017 -0600
Committer: Mike Drob 
Committed: Sun Nov 12 19:47:38 2017 -0600

--
 .../hbase/client/TestRpcControllerFactory.java  |   8 +-
 .../hbase/ipc/IntegrationTestRpcClient.java |  10 +-
 .../hadoop/hbase/rest/model/VersionModel.java   |   5 +-
 .../hbase/rest/HBaseRESTTestingUtility.java |   3 +-
 .../hadoop/hbase/rest/TestVersionResource.java  |   8 +-
 .../hbase/rest/model/TestVersionModel.java  |   5 +
 .../org/apache/hadoop/hbase/TestZooKeeper.java  |   5 +-
 .../hadoop/hbase/client/TestFromClientSide.java |   8 +-
 .../coprocessor/TestCoprocessorInterface.java   |   4 +-
 ...erverForAddingMutationsFromCoprocessors.java |   2 +-
 .../apache/hadoop/hbase/filter/TestFilter.java  |   6 +-
 .../TestMasterOperationsForRegionReplicas.java  |   3 +-
 .../hbase/master/TestRegionPlacement.java   |   3 +-
 .../normalizer/TestSimpleRegionNormalizer.java  |   6 +-
 ...TestMasterProcedureSchedulerConcurrency.java |   2 +
 .../procedure/TestWALProcedureStoreOnHDFS.java  | 149 +--
 .../hadoop/hbase/mob/TestCachedMobFile.java |   1 +
 .../TestEndToEndSplitTransaction.java   |   4 +-
 .../hbase/regionserver/TestHRegionInfo.java |   1 +
 .../hbase/regionserver/TestKeepDeletes.java |   1 +
 .../regionserver/TestMemStoreChunkPool.java |   4 +-
 .../regionserver/TestRegionServerMetrics.java   |   4 +-
 .../regionserver/TestServerNonceManager.java|   2 +-
 .../TestStoreFileRefresherChore.java|  33 ++--
 .../replication/TestReplicationEndpoint.java|   2 +-
 .../security/access/TestTablePermissions.java   |   4 +-
 .../hbase/util/MultiThreadedUpdaterWithACL.java |   8 +-
 27 files changed, 150 insertions(+), 141 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
--
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
index 9007f65..2f9fe2a 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
@@ -22,14 +22,10 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
 import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.curator.shaded.com.google.common.collect.ConcurrentHashMultiset;
-import org.apache.curator.shaded.com.google.common.collect.Multiset;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.CellScanner;
@@ -41,9 +37,13 @@ import org.apache.hadoop.hbase.coprocessor.ProtobufCoprocessorService;
 import org.apache.hadoop.hbase.ipc.DelegatingHBaseRpcController;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ConcurrentHashMultiset;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multiset;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
+
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Rule;
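The hunk above replaces Curator's shaded Guava collections with HBase's relocated copies. As a hedged illustration of what the corrected imports are used for (the class name and counted string below are made up, not part of the patch), a thread-safe multiset from the relocated package counts occurrences like this:

    import org.apache.hadoop.hbase.shaded.com.google.common.collect.ConcurrentHashMultiset;
    import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multiset;

    public class ShadedMultisetSketch {
      public static void main(String[] args) {
        // Thread-safe multiset, suitable for counting events across test threads.
        Multiset<String> calls = ConcurrentHashMultiset.create();
        calls.add("getTable");
        calls.add("getTable");
        System.out.println(calls.count("getTable")); // prints 2
      }
    }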

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
--
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
index f955610..27a2d85 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java

[26/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.html
index d078639..f14fa8a 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.html
The hunks in this file are the regenerated javadoc for AbstractFSWAL<W extends WALProvider.WriterBase>, the @InterfaceAudience.Private "Implementation of WAL to go against FileSystem; i.e. keep WALs in HDFS." The API-visible change is the HTrace migration: blockOnSync(SyncFuture syncFuture) now returns org.apache.htrace.core.Span instead of org.apache.htrace.Span, and getSyncFuture(long sequence, org.apache.htrace.core.Span span) takes the new core Span type. The remaining hunks re-render the field details (LOG, DEFAULT_SLOW_SYNC_TIME_MS, DEFAULT_WAL_SYNC_TIMEOUT_MS, fs "file system instance", walDir "WAL directory, where all WAL files would be placed", walArchiveDir "dir path where old logs are kept", ourFiles "matches just those wal files that belong to this wal instance", walFilePrefix "prefix of a WAL file, usually the region server name it is hosted on", walFileSuffix, prefixPathStr "prefix used when checking for wal membership", coprocessorHost, conf, listeners "listeners that are called on WAL events", sequenceIdAccounting) with markup-only changes; the declarations themselves are identical before and after.

[47/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
index ff49ebb..9657f4f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
The regenerated javadoc for the final class AsyncRequestFutureImpl.SingleServerRequestRunnable (a Runnable, submittable to a thread pool, that submits a MultiAction to a single server) contains markup-only changes: the class header, the fields multiAction (MultiAction), numAttempt (int), server (ServerName) and callsInProgress (Set<CancellableRegionServerCallable>), the constructor SingleServerRequestRunnable(MultiAction multiAction, int numAttempt, ServerName server, Set<CancellableRegionServerCallable> callsInProgress), and run() (specified by java.lang.Runnable.run) are re-rendered with identical declarations.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
index 3980f41..897cf16 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
Likewise, the regenerated javadoc for the @InterfaceAudience.Private class AsyncRequestFutureImpl<CResult> implements AsyncRequestFuture ("The context, and return value, for a single submit/submitAll call") changes only generated markup: the field details for LOG, tracker (RetryingTimeTracker), callback (Batch.Callback<CResult>), errors (BatchErrors), errorsByServer (ConnectionImplementation.ServerErrorTracker) and pool are re-rendered with identical declarations.

[11/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
index a89df18..ea0bc8c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
@@ -49,26 +49,26 @@
The import block of HFileReaderImpl (source lines 041-066) is re-rendered; the substantive change is the HTrace migration:

-import org.apache.htrace.Trace;
-import org.apache.htrace.TraceScope;
+import org.apache.hadoop.hbase.trace.TraceUtil;
+import org.apache.htrace.core.TraceScope;

The other imports in the hunk (ByteBufferKeyValue, SizeCachedKeyValue, SizeCachedNoTagsKeyValue, InterfaceAudience, HFileSystem, FSDataInputStreamWrapper, Compression, Cipher, Encryption, DataBlockEncoder, DataBlockEncoding, HFileBlockDecodingContext, HFile.FileInfo, ByteBuff, KeyValueScanner, EncryptionUtil, ByteBufferUtils, Bytes, IdLock, ObjectIntPair, WritableUtils and the shaded VisibleForTesting) only shift line numbers.
@@ -263,1235 +263,1235 @@
The body of the class from source line 255 onward is re-rendered; the hunk shows the old rendering of the prefetch-on-open block being removed. When cacheConf.shouldPrefetchOnOpen() is true, PrefetchExecutor.request(path, ...) runs a task that walks block offsets from 0 up to getTrailer().getLoadOnOpenDataOffset(), stops if the thread is interrupted, passes the previous block's getNextBlockOnDiskSize() (or -1) into readBlock(offset, onDiskSize, true, false, false, false, null, null) so the data is read from the filesystem and cached in the block cache, and calls returnBlock(block) on each block since no reference count should be held here.
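For context on the HTrace change above, here is a minimal, hedged sketch of the HTrace 4 scope pattern that the new imports (TraceUtil and org.apache.htrace.core.TraceScope) point at. TraceUtil.createTrace is assumed to be the HBase helper that opens a scope, and the span name is illustrative, not taken from this diff:

    import org.apache.hadoop.hbase.trace.TraceUtil;
    import org.apache.htrace.core.TraceScope;

    public class PrefetchTraceSketch {
      void tracedWork() {
        // In HTrace 4 a TraceScope is Closeable, so try-with-resources ends the span.
        try (TraceScope scope = TraceUtil.createTrace("HFileReaderImpl.prefetch")) {
          // ... read blocks from offset 0 up to the load-on-open data offset ...
        }
      }
    }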

[02/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html
index ffb4e9a..b7013ce 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html
@@ -85,762 +85,758 @@
The hunk shrinks from 762 to 758 lines; the visible source change is that the old import of org.apache.htrace.TraceInfo (source line 080) disappears. The rest of the removed lines are the previous rendering of the class: ServerRpcConnection is the @InterfaceAudience.Private abstract class, implementing Closeable, that "reads calls from a connection and queues them for handling". It keeps the RpcServer reference, connection-header state, the cached remote host, port and address, the Codec and CompressionCodec the client asked for, the BlockingService, SASL state (authMethod, saslServer, cryptoAES, wrap flags, fallback flag), retryImmediatelySupported, and the User/UserGroupInformation. The removed rendering runs through the constructor, toString(), getHostAddress(), getHostInetAddress(), getRemotePort(), getVersionInfo(), getFatalConnectionString(), getAuthorizedUgi() (which resolves a DIGEST token identifier or creates a remote user and tags it with the authentication method) and into setupCellBlockCodecs().

[33/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
index d1b724b..6842d77 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
@@ -18,7 +18,7 @@
The only change in this hunk is the generated "var methods = {...}" JavaScript array that drives the method-summary tabs on the HRegion page; the long index array is regenerated and no class or member documentation changes here.
[04/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerCall.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerCall.html b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerCall.html
index e81ef65..64f3e35 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerCall.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerCall.html
@@ -53,477 +53,469 @@
The hunk shrinks from 477 to 469 lines: the import of org.apache.htrace.TraceInfo (source line 048) goes away, and the old rendering removed here still declares the "protected final TraceInfo tinfo" field and a TraceInfo constructor parameter, which the ByteBufferPool class-use diff later in this digest shows being dropped from the ServerCall constructors. The removed lines are the previous rendering of ServerCall<T extends ServerRpcConnection>, the @InterfaceAudience.Private "datastructure that holds all necessary to a method invocation and then afterward, carries the result": the client call id, BlockingService, MethodDescriptor, RequestHeader, parameter Message, optional CellScanner, the connection, receive time, timeout, start time and deadline, the ByteBufferPool reservoir and CellBlockBuilder, the response BufferChain, call size, error flag, cell block stream, CallCleanup, user, remote address, RpcCallback and the response/exception size counters, followed by the constructor, done() and cleanup().

[22/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.html
index 35b4ae4..2c37b3f 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.html
The regenerated javadoc for the @InterfaceAudience.Private class RecoverableZooKeeper ("A zookeeper that can handle 'recoverable' errors. To handle recoverable errors, developers need to realize that there are two" classes of them) contains markup-only changes: the fields LOG, zk (org.apache.zookeeper.ZooKeeper), retryCounterFactory (RetryCounterFactory), identifier (String), id (byte[]), watcher (org.apache.zookeeper.Watcher), sessionTimeout (int), quorumServers (String) and metrics (ZooKeeperMetricsListener), the two RecoverableZooKeeper(String quorumServers, int sessionTimeout, Watcher watcher, int maxRetries, ...) constructors, checkZk() (which tries to create a ZooKeeper connection and turns any exception encountered into a KeeperException.OperationTimeoutException so it can be retried) and reconnectAfterExpiration are all re-rendered with identical declarations.
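The class summary above is the "recoverable error" retry idea in a nutshell. As a hedged illustration (this is a generic sketch of the pattern, not the HBase implementation; the retry count and backoff are made up), a ZooKeeper call retried on connection loss looks roughly like this:

    import org.apache.zookeeper.KeeperException;
    import org.apache.zookeeper.ZooKeeper;
    import org.apache.zookeeper.data.Stat;

    final class RetryingExistsSketch {
      // Retry exists() on CONNECTIONLOSS, which may be transient while the session lives.
      static Stat exists(ZooKeeper zk, String path, int maxRetries)
          throws KeeperException, InterruptedException {
        KeeperException last = null;
        for (int attempt = 0; attempt <= maxRetries; attempt++) {
          try {
            return zk.exists(path, false);
          } catch (KeeperException.ConnectionLossException e) {
            last = e;            // recoverable: back off and try again
            Thread.sleep(1000L); // illustrative fixed backoff
          }
        }
        throw last;
      }
    }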

[46/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 5d70970..1511504 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -182,14 +182,14 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.filter.FilterList.Operator
-org.apache.hadoop.hbase.filter.FuzzyRowFilter.SatisfiesCode
-org.apache.hadoop.hbase.filter.Filter.ReturnCode
-org.apache.hadoop.hbase.filter.FilterWrapper.FilterRowRetCode
 org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
-org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
 org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
+org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
 org.apache.hadoop.hbase.filter.FuzzyRowFilter.Order
+org.apache.hadoop.hbase.filter.FilterWrapper.FilterRowRetCode
+org.apache.hadoop.hbase.filter.FuzzyRowFilter.SatisfiesCode
+org.apache.hadoop.hbase.filter.Filter.ReturnCode
+org.apache.hadoop.hbase.filter.FilterList.Operator
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/io/class-use/ByteBufferPool.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/ByteBufferPool.html b/devapidocs/org/apache/hadoop/hbase/io/class-use/ByteBufferPool.html
index e4ac3fe..5ab7046 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/class-use/ByteBufferPool.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/class-use/ByteBufferPool.html
@@ -185,7 +185,7 @@ (and the following hunks)
The "use" page for ByteBufferPool is regenerated because the constructors that take a ByteBufferPool lose their HTrace argument: NettyServerCall(...), ServerCall(...) and SimpleServerCall(...) each drop the "org.apache.htrace.TraceInfo tinfo" parameter, while the parameters that remain visible in the hunks (int id, the shaded protobuf BlockingService service, Descriptors.MethodDescriptor md, RPCProtos.RequestHeader header, CellScanner cellScanner, the connection, long size, InetAddress remoteAddress, long receiveTime, int timeout and RpcServer.CallCleanup reqCleanup) are unchanged.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.BlockIndexNotLoadedException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.BlockIndexNotLoadedException.html

[14/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html
index a89df18..ea0bc8c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html
@@ -49,26 +49,26 @@ and @@ -263,1235 +263,1235 @@
These are the same HFileReaderImpl source hunks shown under [11/51] above (the index hashes a89df18..ea0bc8c match), rendered into the EncodedScanner page: org.apache.htrace.Trace and org.apache.htrace.TraceScope are replaced by org.apache.hadoop.hbase.trace.TraceUtil and org.apache.htrace.core.TraceScope, and the prefetch-on-open Runnable is re-rendered.
[28/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
index 4a9326a..2263026 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
@@ -277,58 +277,54 @@ implements requestCount
 
 
-(package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder
-requestRowActionCount
-
-
 private int
 rowSizeWarnThreshold
 Row size threshold for multi requests above which a warning 
is logged
 
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder
 rpcGetRequestCount
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder
 rpcMultiRequestCount
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder
 rpcMutateRequestCount
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder
 rpcScanRequestCount
 
-
+
 (package private) RpcServerInterface
 rpcServer
 
-
+
 private int
 rpcTimeout
 The RPC timeout period (milliseconds)
 
 
-
+
 private static http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 SCANNER_ALREADY_CLOSED
 Deprecated.
 
 
-
+
 private ScannerIdGenerator
 scannerIdGenerator
 
-
+
 private int
 scannerLeaseTimeoutPeriod
 The lease timeout period for client scanners 
(milliseconds).
 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true;
 title="class or interface in java.util.concurrent">ConcurrentMaphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,RSRpcServices.RegionScannerHolder
 scanners
 
@@ -999,22 +995,13 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder requestCount
 
 
-
-
-
-
-
-requestRowActionCount
-finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder requestRowActionCount
-
-
 
 
 
 
 
 rpcGetRequestCount
-finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder rpcGetRequestCount
+finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder rpcGetRequestCount
 
 
 
@@ -1023,7 +1010,7 @@ implements 
 
 rpcScanRequestCount
-finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder rpcScanRequestCount
+finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder rpcScanRequestCount
 
 
 
@@ -1032,7 +1019,7 @@ implements 
 
 rpcMultiRequestCount
-finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder rpcMultiRequestCount
+finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder rpcMultiRequestCount
 
 
 
@@ -1041,7 +1028,7 @@ implements 
 
 rpcMutateRequestCount
-finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder rpcMutateRequestCount
+finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder rpcMutateRequestCount
 
 
 
@@ -1050,7 +1037,7 @@ implements 
 
 rpcServer
-finalRpcServerInterface rpcServer
+finalRpcServerInterface 

[09/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.html
index b9ac0db..eb74b5b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.html
@@ -6,302 +6,303 @@
 
 
 
-001/**
-002 * Copyright The Apache Software 
Foundation
-003 *
-004 * Licensed to the Apache Software 
Foundation (ASF) under one or more
-005 * contributor license agreements. See 
the NOTICE file distributed with this
-006 * work for additional information 
regarding copyright ownership. The ASF
-007 * licenses this file to you under the 
Apache License, Version 2.0 (the
-008 * "License"); you may not use this file 
except in compliance with the License.
-009 * You may obtain a copy of the License 
at
-010 *
-011 * 
http://www.apache.org/licenses/LICENSE-2.0
-012 *
-013 * Unless required by applicable law or 
agreed to in writing, software
-014 * distributed under the License is 
distributed on an "AS IS" BASIS, WITHOUT
-015 * WARRANTIES OR CONDITIONS OF ANY KIND, 
either express or implied. See the
-016 * License for the specific language 
governing permissions and limitations
-017 * under the License.
-018 */
-019
-020package 
org.apache.hadoop.hbase.io.hfile;
-021
-022import net.spy.memcached.CachedData;
-023import 
net.spy.memcached.ConnectionFactoryBuilder;
-024import net.spy.memcached.FailureMode;
-025import 
net.spy.memcached.MemcachedClient;
-026import 
net.spy.memcached.transcoders.Transcoder;
-027import org.apache.commons.logging.Log;
-028import 
org.apache.commons.logging.LogFactory;
-029import 
org.apache.hadoop.conf.Configuration;
-030import 
org.apache.hadoop.hbase.HConstants;
-031import 
org.apache.yetus.audience.InterfaceAudience;
-032import 
org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
-033import 
org.apache.hadoop.hbase.nio.ByteBuff;
-034import 
org.apache.hadoop.hbase.nio.SingleByteBuff;
-035import 
org.apache.hadoop.hbase.util.Addressing;
-036import org.apache.htrace.Trace;
-037import org.apache.htrace.TraceScope;
-038
-039
-040import java.io.IOException;
-041import java.net.InetSocketAddress;
-042import java.nio.ByteBuffer;
-043import java.util.ArrayList;
-044import java.util.Iterator;
-045import java.util.List;
-046import 
java.util.NoSuchElementException;
-047import 
java.util.concurrent.ExecutionException;
-048
-049/**
-050 * Class to store blocks into 
memcached.
-051 * This should only be used on a cluster 
of Memcached daemons that are tuned well and have a
-052 * good network connection to the HBase 
regionservers. Any other use will likely slow down HBase
-053 * greatly.
-054 */
-055@InterfaceAudience.Private
-056public class MemcachedBlockCache 
implements BlockCache {
-057  private static final Log LOG = 
LogFactory.getLog(MemcachedBlockCache.class.getName());
-058
-059  // Some memcache versions won't take 
more than 1024 * 1024. So set the limit below
-060  // that just in case this client is 
used with those versions.
-061  public static final int MAX_SIZE = 1020 
* 1024;
-062
-063  // Config key for what memcached 
servers to use.
-064  // They should be specified in a comma 
sperated list with ports.
-065  // like:
-066  //
-067  // host1:11211,host3:8080,host4:11211
-068  public static final String 
MEMCACHED_CONFIG_KEY = "hbase.cache.memcached.servers";
-069  public static final String 
MEMCACHED_TIMEOUT_KEY = "hbase.cache.memcached.timeout";
-070  public static final String 
MEMCACHED_OPTIMEOUT_KEY = "hbase.cache.memcached.optimeout";
-071  public static final String 
MEMCACHED_OPTIMIZE_KEY = "hbase.cache.memcached.spy.optimze";
-072  public static final long 
MEMCACHED_DEFAULT_TIMEOUT = 500;
-073  public static final boolean 
MEMCACHED_OPTIMIZE_DEFAULT = false;
-074
-075  private final MemcachedClient client;
-076  private final HFileBlockTranscoder tc = 
new HFileBlockTranscoder();
-077  private final CacheStats cacheStats = 
new CacheStats("MemcachedBlockCache");
-078
-079  public 
MemcachedBlockCache(Configuration c) throws IOException {
-080LOG.info("Creating 
MemcachedBlockCache");
-081
-082long opTimeout = 
c.getLong(MEMCACHED_OPTIMEOUT_KEY, MEMCACHED_DEFAULT_TIMEOUT);
-083long queueTimeout = 
c.getLong(MEMCACHED_TIMEOUT_KEY, opTimeout + MEMCACHED_DEFAULT_TIMEOUT);
-084boolean optimize = 
c.getBoolean(MEMCACHED_OPTIMIZE_KEY, MEMCACHED_OPTIMIZE_DEFAULT);
-085
-086ConnectionFactoryBuilder builder = 
new ConnectionFactoryBuilder()
-087.setOpTimeout(opTimeout)
-088
.setOpQueueMaxBlockTime(queueTimeout) // Cap the max time before anything times 
out
-089
.setFailureMode(FailureMode.Redistribute)
-090
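As a hedged sketch of how those keys are set (the server list is the example from the class comments; wiring the cache into a live regionserver involves more configuration than this):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class MemcachedCacheConfSketch {
      static Configuration build() {
        Configuration conf = HBaseConfiguration.create();
        // Comma-separated memcached daemons, per the class comment above.
        conf.set("hbase.cache.memcached.servers", "host1:11211,host3:8080,host4:11211");
        // Operation timeout in milliseconds (the class default is 500 ms).
        conf.setLong("hbase.cache.memcached.optimeout", 500L);
        return conf; // new MemcachedBlockCache(conf) consumes these keys
      }
    }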

[13/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
index a89df18..ea0bc8c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
@@ -49,26 +49,26 @@
 041import org.apache.hadoop.hbase.ByteBufferKeyValue;
 042import org.apache.hadoop.hbase.SizeCachedKeyValue;
 043import org.apache.hadoop.hbase.SizeCachedNoTagsKeyValue;
-044import org.apache.yetus.audience.InterfaceAudience;
-045import org.apache.hadoop.hbase.fs.HFileSystem;
-046import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-047import org.apache.hadoop.hbase.io.compress.Compression;
-048import org.apache.hadoop.hbase.io.crypto.Cipher;
-049import org.apache.hadoop.hbase.io.crypto.Encryption;
-050import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
-051import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-052import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
-053import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
-054import org.apache.hadoop.hbase.nio.ByteBuff;
-055import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
-056import org.apache.hadoop.hbase.security.EncryptionUtil;
-057import org.apache.hadoop.hbase.util.ByteBufferUtils;
-058import org.apache.hadoop.hbase.util.Bytes;
-059import org.apache.hadoop.hbase.util.IdLock;
-060import org.apache.hadoop.hbase.util.ObjectIntPair;
-061import org.apache.hadoop.io.WritableUtils;
-062import org.apache.htrace.Trace;
-063import org.apache.htrace.TraceScope;
+044import org.apache.hadoop.hbase.trace.TraceUtil;
+045import org.apache.yetus.audience.InterfaceAudience;
+046import org.apache.hadoop.hbase.fs.HFileSystem;
+047import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
+048import org.apache.hadoop.hbase.io.compress.Compression;
+049import org.apache.hadoop.hbase.io.crypto.Cipher;
+050import org.apache.hadoop.hbase.io.crypto.Encryption;
+051import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
+052import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+053import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
+054import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
+055import org.apache.hadoop.hbase.nio.ByteBuff;
+056import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
+057import org.apache.hadoop.hbase.security.EncryptionUtil;
+058import org.apache.hadoop.hbase.util.ByteBufferUtils;
+059import org.apache.hadoop.hbase.util.Bytes;
+060import org.apache.hadoop.hbase.util.IdLock;
+061import org.apache.hadoop.hbase.util.ObjectIntPair;
+062import org.apache.hadoop.io.WritableUtils;
+063import org.apache.htrace.core.TraceScope;
 064
 065import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 066
@@ -263,1235 +263,1235 @@
 255    // Prefetch file blocks upon open if requested
 256    if (cacheConf.shouldPrefetchOnOpen()) {
 257      PrefetchExecutor.request(path, new Runnable() {
-258        public void run() {
-259          long offset = 0;
-260          long end = 0;
-261          try {
-262            end = getTrailer().getLoadOnOpenDataOffset();
-263            if (LOG.isTraceEnabled()) {
-264              LOG.trace("Prefetch start " + getPathOffsetEndStr(path, offset, end));
-265            }
-266            // TODO: Could we use block iterator in here? Would that get stuff into the cache?
-267            HFileBlock prevBlock = null;
-268            while (offset < end) {
-269              if (Thread.interrupted()) {
-270                break;
-271              }
-272              // Perhaps we got our block from cache? Unlikely as this may be, if it happens, then
-273              // the internal-to-hfileblock thread local which holds the overread that gets the
-274              // next header, will not have happened...so, pass in the onDiskSize gotten from the
-275              // cached block. This 'optimization' triggers extremely rarely I'd say.
-276              long onDiskSize = prevBlock != null? prevBlock.getNextBlockOnDiskSize(): -1;
-277              HFileBlock block = readBlock(offset, onDiskSize, true, false, false, false,
-278                  null, null);
-279              // Need not update the current block. Ideally here the readBlock won't find the
-280              // block in cache. We call this readBlock so that block data is read from FS and
-281              // cached in BC. So there is no reference count increment that happens here.
-282              // The return will ideally be a noop
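The prefetch above only fires when cacheConf.shouldPrefetchOnOpen() is true; as a hedged aside, that is normally switched on per column family rather than globally. A sketch, assuming the standard ColumnFamilyDescriptorBuilder API (which is not part of this diff):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class PrefetchOnOpenExample {
  public static void main(String[] args) {
    // Hypothetical table and family names; setPrefetchBlocksOnOpen(true) is what
    // makes shouldPrefetchOnOpen() return true for this family's HFiles.
    TableDescriptor table = TableDescriptorBuilder.newBuilder(TableName.valueOf("demo"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"))
            .setPrefetchBlocksOnOpen(true)
            .build())
        .build();
    System.out.println(table);
  }
}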

[32/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index 19d195f..45b21a9 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HRegionServer.CompactionChecker
+private static class HRegionServer.CompactionChecker
 extends ScheduledChore
 
 
@@ -233,7 +233,7 @@ extends 
 
 instance
-private final HRegionServer instance
+private final HRegionServer instance
 
 
 
@@ -242,7 +242,7 @@ extends 
 
 majorCompactPriority
-private final int majorCompactPriority
+private final int majorCompactPriority
 
 
 
@@ -251,7 +251,7 @@ extends 
 
 DEFAULT_PRIORITY
-private static final int DEFAULT_PRIORITY
+private static final int DEFAULT_PRIORITY
 
 See Also:
 Constant
 Field Values
@@ -264,7 +264,7 @@ extends 
 
 iteration
-private long iteration
+private long iteration
 
 
 
@@ -281,7 +281,7 @@ extends 
 
 CompactionChecker
-CompactionChecker(HRegionServer h,
+CompactionChecker(HRegionServer h,
                   int sleepTime,
                   Stoppable stopper)
 
@@ -300,7 +300,7 @@ extends 
 
 chore
-protected void chore()
+protected void chore()
 Description copied from class: ScheduledChore
 The task to execute on each scheduled execution of the Chore
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index d4c4486..a57fb8f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HRegionServer.MovedRegionInfo
+private static class HRegionServer.MovedRegionInfo
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -218,7 +218,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 serverName
-private final ServerName serverName
+private final ServerName serverName
 
 
 
@@ -227,7 +227,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 seqNum
-private final long seqNum
+private final long seqNum
 
 
 
@@ -236,7 +236,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 ts
-private final long ts
+private final long ts
 
 
 
@@ -253,7 +253,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 MovedRegionInfo
-public MovedRegionInfo(ServerName serverName,
+public MovedRegionInfo(ServerName serverName,
                        long closeSeqNum)
 
 
@@ -271,7 +271,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getServerName
-public ServerName getServerName()
+public ServerName getServerName()
 
 
 
@@ -280,7 +280,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getSeqNum
-public long getSeqNum()
+public long getSeqNum()
 
 
 
@@ -289,7 +289,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getMoveTime
-public long getMoveTime()
+public long getMoveTime()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index 7fd25b4..4b654ce 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static final class HRegionServer.MovedRegionsCleaner
+protected static final class HRegionServer.MovedRegionsCleaner
 extends ScheduledChore
 implements Stoppable
 Creates a Chore thread to clean the moved region 
cache.
@@ -242,7 +242,7 @@ implements 
 
 regionServer
-private HRegionServer regionServer
+private HRegionServer regionServer
 
 
 
@@ -251,7 +251,7 @@ implements 
 
 

[39/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html 
b/devapidocs/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
index cd08b4e..c74c2c1 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class HMaster.RedirectServlet
+public static class HMaster.RedirectServlet
 extends javax.servlet.http.HttpServlet
 
 See Also:
@@ -243,7 +243,7 @@ extends javax.servlet.http.HttpServlet
 
 
 serialVersionUID
-private static final long serialVersionUID
+private static final long serialVersionUID
 
 See Also:
 Constant
 Field Values
@@ -256,7 +256,7 @@ extends javax.servlet.http.HttpServlet
 
 
 regionServerInfoPort
-private final int regionServerInfoPort
+private final int regionServerInfoPort
 
 
 
@@ -265,7 +265,7 @@ extends javax.servlet.http.HttpServlet
 
 
 regionServerHostname
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String regionServerHostname
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String regionServerHostname
 
 
 
@@ -282,7 +282,7 @@ extends javax.servlet.http.HttpServlet
 
 
 RedirectServlet
-public RedirectServlet(InfoServer infoServer,
+public RedirectServlet(InfoServer infoServer,
                        String hostname)
 
 Parameters:
@@ -305,7 +305,7 @@ extends javax.servlet.http.HttpServlet
 
 
 doGet
-public void doGet(javax.servlet.http.HttpServletRequest request,
+public void doGet(javax.servlet.http.HttpServletRequest request,
                   javax.servlet.http.HttpServletResponse response)
            throws javax.servlet.ServletException,
                   java.io.IOException



[06/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/ipc/Call.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/Call.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/Call.html
index e2b7111..d2cb27d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/Call.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/Call.html
@@ -38,8 +38,8 @@
 030import org.apache.yetus.audience.InterfaceAudience;
 031import org.apache.hadoop.hbase.client.MetricsConnection;
 032import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-033import org.apache.htrace.Span;
-034import org.apache.htrace.Trace;
+033import org.apache.htrace.core.Span;
+034import org.apache.htrace.core.Tracer;
 035
 036/** A call waiting for a value. */
 037@InterfaceAudience.Private
@@ -81,7 +81,7 @@
 073    this.timeout = timeout;
 074    this.priority = priority;
 075    this.callback = callback;
-076    this.span = Trace.currentSpan();
+076    this.span = Tracer.getCurrentSpan();
 077  }
 078
 079  @Override
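The hunk above is part of the move from htrace 3 to the htrace-core (htrace 4) API; a rough sketch of the call-site change, assuming a Tracer has already been built and configured elsewhere:

import org.apache.htrace.core.Span;
import org.apache.htrace.core.Tracer;

public class SpanCaptureExample {
  // htrace 3: span = org.apache.htrace.Trace.currentSpan();
  // htrace 4: the active span is looked up through the Tracer, as the diff shows.
  static Span captureCurrentSpan() {
    return Tracer.getCurrentSpan();   // the span active on this thread, if any
  }
}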

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/ipc/CallRunner.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/CallRunner.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/CallRunner.html
index 036fe26..0c56c9e 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/CallRunner.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/CallRunner.html
@@ -32,211 +32,204 @@
 024import org.apache.hadoop.hbase.CallDroppedException;
 025import org.apache.hadoop.hbase.CellScanner;
 026import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-027import org.apache.yetus.audience.InterfaceAudience;
-028import org.apache.yetus.audience.InterfaceStability;
-029import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-030import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
-031import org.apache.hadoop.hbase.security.User;
-032import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-033import org.apache.hadoop.hbase.util.Pair;
-034import org.apache.hadoop.util.StringUtils;
-035import org.apache.htrace.Trace;
-036import org.apache.htrace.TraceScope;
-037
-038/**
-039 * The request processing logic, which is usually executed in thread pools provided by an
-040 * {@link RpcScheduler}.  Call {@link #run()} to actually execute the contained
-041 * RpcServer.Call
-042 */
-043@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
-044@InterfaceStability.Evolving
-045public class CallRunner {
-046
-047  private static final CallDroppedException CALL_DROPPED_EXCEPTION
-048    = new CallDroppedException();
-049
-050  private RpcCall call;
-051  private RpcServerInterface rpcServer;
-052  private MonitoredRPCHandler status;
-053  private volatile boolean sucessful;
-054
-055  /**
-056   * On construction, adds the size of this call to the running count of outstanding call sizes.
-057   * Presumption is that we are put on a queue while we wait on an executor to run us.  During this
-058   * time we occupy heap.
-059   */
-060  // The constructor is shutdown so only RpcServer in this class can make one of these.
-061  CallRunner(final RpcServerInterface rpcServer, final RpcCall call) {
-062    this.call = call;
-063    this.rpcServer = rpcServer;
-064    // Add size of the call to queue size.
-065    if (call != null && rpcServer != null) {
-066      this.rpcServer.addCallSize(call.getSize());
-067    }
-068  }
-069
-070  public RpcCall getRpcCall() {
-071    return call;
-072  }
-073
-074  /**
-075   * Keep for backward compatibility.
-076   * @deprecated As of release 2.0, this will be removed in HBase 3.0
-077   */
-078  @Deprecated
-079  public ServerCall<?> getCall() {
-080    return (ServerCall<?>) call;
-081  }
-082
-083  public void setStatus(MonitoredRPCHandler status) {
-084    this.status = status;
-085  }
-086
-087  /**
-088   * Cleanup after ourselves... let go of references.
-089   */
-090  private void cleanup() {
-091    this.call = null;
-092    this.rpcServer = null;
-093  }
-094
-095  public void run() {
-096    try {
-097      if (call.disconnectSince() >= 0) {
-098        if (RpcServer.LOG.isDebugEnabled()) {
-099          RpcServer.LOG.debug(Thread.currentThread().getName() + ": skipped " + call);
-100        }
-101        return;
-102      }
-103      call.setStartTime(System.currentTimeMillis());
-104      if (call.getStartTime() > call.getDeadline()) {
-105        RpcServer.LOG.warn("Dropping timed out call: " + call);
-106        return;
-107      }
-108      this.status.setStatus("Setting up call");
-109      this.status.setConnection(call.getRemoteAddress().getHostAddress(), call.getRemotePort());
-110      if
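To make the class comment above concrete: a toy dispatcher that hands each CallRunner to a thread pool and lets the pool thread invoke run(). This only illustrates the shape the javadoc describes, not HBase's actual RpcScheduler implementations:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.hadoop.hbase.ipc.CallRunner;

public class ToyScheduler {
  private final ExecutorService pool = Executors.newFixedThreadPool(8);

  public boolean dispatch(CallRunner task) {
    // run() performs the disconnect/deadline checks shown in the listing above.
    pool.execute(task::run);
    return true;
  }
}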

[37/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
index f5a4376..408bc10 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
@@ -132,7 +132,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class HMasterCommandLine.LocalHMaster
+public static class HMasterCommandLine.LocalHMaster
 extends HMaster
 
 
@@ -318,7 +318,7 @@ extends 
 
 zkcluster
-privateMiniZooKeeperCluster zkcluster
+privateMiniZooKeeperCluster zkcluster
 
 
 
@@ -335,7 +335,7 @@ extends 
 
 LocalHMaster
-publicLocalHMaster(org.apache.hadoop.conf.Configurationconf)
+publicLocalHMaster(org.apache.hadoop.conf.Configurationconf)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException,
 org.apache.zookeeper.KeeperException,
 http://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException
@@ -361,7 +361,7 @@ extends 
 
 run
-publicvoidrun()
+publicvoidrun()
 Description copied from 
class:HRegionServer
 The HRegionServer sticks in this loop until closed.
 
@@ -378,7 +378,7 @@ extends 
 
 setZKCluster
-voidsetZKCluster(MiniZooKeeperClusterzkcluster)
+voidsetZKCluster(MiniZooKeeperClusterzkcluster)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.html 
b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.html
index 0a17997..0a31ab3 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.html
@@ -124,7 +124,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class HMasterCommandLine
+public class HMasterCommandLine
 extends ServerCommandLine
 
 
@@ -282,7 +282,7 @@ extends 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -291,7 +291,7 @@ extends 
 
 USAGE
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String USAGE
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String USAGE
 
 See Also:
 Constant
 Field Values
@@ -304,7 +304,7 @@ extends 
 
 masterClass
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends HMaster masterClass
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends HMaster masterClass
 
 
 
@@ -321,7 +321,7 @@ extends 
 
 HMasterCommandLine
-publicHMasterCommandLine(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends HMastermasterClass)
+publicHMasterCommandLine(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends HMastermasterClass)
 
 
 
@@ -338,7 +338,7 @@ extends 
 
 getUsage
-protectedhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetUsage()
+protectedhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetUsage()
 Description copied from 
class:ServerCommandLine
 Implementing subclasses should return a usage string to 
print out.
 
@@ -353,7 +353,7 @@ extends 
 
 run
-publicintrun(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]args)
+publicintrun(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]args)
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -367,7 +367,7 @@ extends 
 
 startMaster
-privateintstartMaster()
+privateintstartMaster()
 
 
 
@@ -376,7 +376,7 @@ extends 
 
 stopMaster
-privateintstopMaster()

[51/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/abb69192
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/abb69192
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/abb69192

Branch: refs/heads/asf-site
Commit: abb69192f3138a163c6713d410254b15347675e8
Parents: 50a23c0
Author: jenkins 
Authored: Sun Nov 12 15:16:26 2017 +
Committer: jenkins 
Committed: Sun Nov 12 15:16:26 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf|10 +-
 .../hbase/mapreduce/TableMapReduceUtil.html | 2 +-
 book.html   | 6 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 22654 -
 checkstyle.rss  |76 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html |91 +-
 dependency-info.html| 4 +-
 dependency-management.html  |10 +-
 devapidocs/allclasses-frame.html| 1 +
 devapidocs/allclasses-noframe.html  | 1 +
 devapidocs/constant-values.html | 6 +-
 devapidocs/index-all.html   |87 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../hadoop/hbase/class-use/CellScanner.html |18 +-
 ...stFutureImpl.ReplicaCallIssuingRunnable.html |14 +-
 ...yncRequestFutureImpl.ReplicaResultState.html |10 +-
 .../client/AsyncRequestFutureImpl.Retry.html|12 +-
 ...tFutureImpl.SingleServerRequestRunnable.html |14 +-
 .../hbase/client/AsyncRequestFutureImpl.html|   102 +-
 .../hadoop/hbase/client/package-tree.html   |24 +-
 .../hadoop/hbase/executor/EventHandler.html |40 +-
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/package-tree.html   |10 +-
 .../hbase/io/class-use/ByteBufferPool.html  | 9 +-
 ...ReaderImpl.BlockIndexNotLoadedException.html | 4 +-
 .../hfile/HFileReaderImpl.EncodedScanner.html   |40 +-
 .../hfile/HFileReaderImpl.HFileScannerImpl.html |   126 +-
 .../HFileReaderImpl.NotSeekedException.html | 4 +-
 .../hadoop/hbase/io/hfile/HFileReaderImpl.html  |   104 +-
 ...emcachedBlockCache.HFileBlockTranscoder.html |12 +-
 .../hbase/io/hfile/MemcachedBlockCache.html |60 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 6 +-
 .../ipc/BlockingRpcConnection.CallSender.html   |16 +-
 .../hadoop/hbase/ipc/BlockingRpcConnection.html |58 +-
 .../org/apache/hadoop/hbase/ipc/Call.html   | 4 +-
 .../org/apache/hadoop/hbase/ipc/CallRunner.html |26 +-
 .../org/apache/hadoop/hbase/ipc/IPCUtil.html|12 +-
 .../hadoop/hbase/ipc/NettyServerCall.html   |16 +-
 .../hbase/ipc/NettyServerRpcConnection.html |28 +-
 .../org/apache/hadoop/hbase/ipc/RpcCall.html|63 +-
 .../org/apache/hadoop/hbase/ipc/ServerCall.html |   194 +-
 .../ServerRpcConnection.ByteBuffByteInput.html  |20 +-
 .../hadoop/hbase/ipc/ServerRpcConnection.html   |   112 +-
 .../hadoop/hbase/ipc/SimpleServerCall.html  |22 +-
 .../hbase/ipc/SimpleServerRpcConnection.html|66 +-
 .../hbase/ipc/class-use/CellBlockBuilder.html   | 9 +-
 .../hbase/ipc/class-use/NettyServerCall.html| 3 +-
 .../ipc/class-use/NettyServerRpcConnection.html | 3 +-
 .../ipc/class-use/RpcServer.CallCleanup.html|18 +-
 .../hadoop/hbase/ipc/class-use/ServerCall.html  | 3 +-
 .../ipc/class-use/SimpleRpcServerResponder.html | 3 +-
 .../hbase/ipc/class-use/SimpleServerCall.html   | 3 +-
 .../class-use/SimpleServerRpcConnection.html| 3 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 2 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 2 +-
 .../master/HMaster.InitializationMonitor.html   |20 +-
 .../hbase/master/HMaster.RedirectServlet.html   |12 +-
 .../org/apache/hadoop/hbase/master/HMaster.html |   454 +-
 .../master/HMasterCommandLine.LocalHMaster.html |10 +-
 .../hadoop/hbase/master/HMasterCommandLine.html |22 +-
 .../hadoop/hbase/master/package-tree.html   | 4 +-
 .../hadoop/hbase/monitoring/package-tree.html   | 2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |14 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 4 +-
 .../hadoop/hbase/quotas/package-tree.html   | 6 +-
 .../HRegion.BatchOperation.Visitor.html | 4 +-
 .../regionserver/HRegion.BatchOperation.html|78 +-
 

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
index a89df18..ea0bc8c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.NotSeekedException.html
@@ -49,26 +49,26 @@
 041import 
org.apache.hadoop.hbase.ByteBufferKeyValue;
 042import 
org.apache.hadoop.hbase.SizeCachedKeyValue;
 043import 
org.apache.hadoop.hbase.SizeCachedNoTagsKeyValue;
-044import 
org.apache.yetus.audience.InterfaceAudience;
-045import 
org.apache.hadoop.hbase.fs.HFileSystem;
-046import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-047import 
org.apache.hadoop.hbase.io.compress.Compression;
-048import 
org.apache.hadoop.hbase.io.crypto.Cipher;
-049import 
org.apache.hadoop.hbase.io.crypto.Encryption;
-050import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
-051import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-052import 
org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
-053import 
org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
-054import 
org.apache.hadoop.hbase.nio.ByteBuff;
-055import 
org.apache.hadoop.hbase.regionserver.KeyValueScanner;
-056import 
org.apache.hadoop.hbase.security.EncryptionUtil;
-057import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-058import 
org.apache.hadoop.hbase.util.Bytes;
-059import 
org.apache.hadoop.hbase.util.IdLock;
-060import 
org.apache.hadoop.hbase.util.ObjectIntPair;
-061import 
org.apache.hadoop.io.WritableUtils;
-062import org.apache.htrace.Trace;
-063import org.apache.htrace.TraceScope;
+044import 
org.apache.hadoop.hbase.trace.TraceUtil;
+045import 
org.apache.yetus.audience.InterfaceAudience;
+046import 
org.apache.hadoop.hbase.fs.HFileSystem;
+047import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
+048import 
org.apache.hadoop.hbase.io.compress.Compression;
+049import 
org.apache.hadoop.hbase.io.crypto.Cipher;
+050import 
org.apache.hadoop.hbase.io.crypto.Encryption;
+051import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
+052import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+053import 
org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
+054import 
org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
+055import 
org.apache.hadoop.hbase.nio.ByteBuff;
+056import 
org.apache.hadoop.hbase.regionserver.KeyValueScanner;
+057import 
org.apache.hadoop.hbase.security.EncryptionUtil;
+058import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+059import 
org.apache.hadoop.hbase.util.Bytes;
+060import 
org.apache.hadoop.hbase.util.IdLock;
+061import 
org.apache.hadoop.hbase.util.ObjectIntPair;
+062import 
org.apache.hadoop.io.WritableUtils;
+063import 
org.apache.htrace.core.TraceScope;
 064
 065import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 066
@@ -263,1235 +263,1235 @@
 255// Prefetch file blocks upon open if 
requested
 256if (cacheConf.shouldPrefetchOnOpen()) 
{
 257  PrefetchExecutor.request(path, new 
Runnable() {
-258public void run() {
-259  long offset = 0;
-260  long end = 0;
-261  try {
-262end = 
getTrailer().getLoadOnOpenDataOffset();
-263if (LOG.isTraceEnabled()) {
-264  LOG.trace("Prefetch start " 
+ getPathOffsetEndStr(path, offset, end));
-265}
-266// TODO: Could we use block 
iterator in here? Would that get stuff into the cache?
-267HFileBlock prevBlock = 
null;
-268while (offset  end) {
-269  if (Thread.interrupted()) 
{
-270break;
-271  }
-272  // Perhaps we got our block 
from cache? Unlikely as this may be, if it happens, then
-273  // the 
internal-to-hfileblock thread local which holds the overread that gets the
-274  // next header, will not 
have happened...so, pass in the onDiskSize gotten from the
-275  // cached block. This 
'optimization' triggers extremely rarely I'd say.
-276  long onDiskSize = prevBlock 
!= null? prevBlock.getNextBlockOnDiskSize(): -1;
-277  HFileBlock block = 
readBlock(offset, onDiskSize, true, false, false, false,
-278  null, null);
-279  // Need not update the 
current block. Ideally here the readBlock won't find the
-280  // block in cache. We call 
this readBlock so that block data is read from FS and
-281  // cached in BC. So there 
is no reference count increment that happens here.
-282  // The return will ideally 

[38/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
index 1f06968..3212da1 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
@@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class HMaster
+public class HMaster
 extends HRegionServer
 implements MasterServices
 HMaster is the "master server" for HBase. An HBase cluster 
has one active
@@ -1433,7 +1433,7 @@ implements 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -1442,7 +1442,7 @@ implements 
 
 MASTER
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MASTER
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MASTER
 
 See Also:
 Constant
 Field Values
@@ -1455,7 +1455,7 @@ implements 
 
 activeMasterManager
-private finalActiveMasterManager activeMasterManager
+private finalActiveMasterManager activeMasterManager
 
 
 
@@ -1464,7 +1464,7 @@ implements 
 
 regionServerTracker
-RegionServerTracker regionServerTracker
+RegionServerTracker regionServerTracker
 
 
 
@@ -1473,7 +1473,7 @@ implements 
 
 drainingServerTracker
-privateDrainingServerTracker 
drainingServerTracker
+privateDrainingServerTracker 
drainingServerTracker
 
 
 
@@ -1482,7 +1482,7 @@ implements 
 
 loadBalancerTracker
-LoadBalancerTracker loadBalancerTracker
+LoadBalancerTracker loadBalancerTracker
 
 
 
@@ -1491,7 +1491,7 @@ implements 
 
 splitOrMergeTracker
-privateSplitOrMergeTracker splitOrMergeTracker
+privateSplitOrMergeTracker splitOrMergeTracker
 
 
 
@@ -1500,7 +1500,7 @@ implements 
 
 regionNormalizerTracker
-privateRegionNormalizerTracker 
regionNormalizerTracker
+privateRegionNormalizerTracker 
regionNormalizerTracker
 
 
 
@@ -1509,7 +1509,7 @@ implements 
 
 maintenanceModeTracker
-privateMasterMaintenanceModeTracker maintenanceModeTracker
+privateMasterMaintenanceModeTracker maintenanceModeTracker
 
 
 
@@ -1518,7 +1518,7 @@ implements 
 
 clusterSchemaService
-privateClusterSchemaService clusterSchemaService
+privateClusterSchemaService clusterSchemaService
 
 
 
@@ -1527,7 +1527,7 @@ implements 
 
 HBASE_MASTER_WAIT_ON_SERVICE_IN_SECONDS
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HBASE_MASTER_WAIT_ON_SERVICE_IN_SECONDS
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String HBASE_MASTER_WAIT_ON_SERVICE_IN_SECONDS
 
 See Also:
 Constant
 Field Values
@@ -1540,7 +1540,7 @@ implements 
 
 DEFAULT_HBASE_MASTER_WAIT_ON_SERVICE_IN_SECONDS
-public static finalint DEFAULT_HBASE_MASTER_WAIT_ON_SERVICE_IN_SECONDS
+public static finalint DEFAULT_HBASE_MASTER_WAIT_ON_SERVICE_IN_SECONDS
 
 See Also:
 Constant
 Field Values
@@ -1553,7 +1553,7 @@ implements 
 
 metricsMaster
-finalMetricsMaster metricsMaster
+finalMetricsMaster metricsMaster
 
 
 
@@ -1562,7 +1562,7 @@ implements 
 
 fileSystemManager
-privateMasterFileSystem fileSystemManager
+privateMasterFileSystem fileSystemManager
 
 
 
@@ -1571,7 +1571,7 @@ implements 
 
 walManager
-privateMasterWalManager walManager
+privateMasterWalManager walManager
 
 
 
@@ -1580,7 +1580,7 @@ implements 
 
 serverManager
-private volatileServerManager serverManager
+private volatileServerManager serverManager
 
 
 
@@ -1589,7 +1589,7 @@ implements 
 
 assignmentManager
-privateAssignmentManager assignmentManager
+privateAssignmentManager assignmentManager
 
 
 
@@ -1598,7 +1598,7 @@ implements 
 
 replicationManager
-privateReplicationManager replicationManager
+privateReplicationManager replicationManager
 
 
 
@@ -1607,7 +1607,7 @@ implements 
 
 rsFatals
-MemoryBoundedLogMessageBuffer rsFatals
+MemoryBoundedLogMessageBuffer rsFatals
 
 
 
@@ -1616,7 +1616,7 @@ implements 
 
 activeMaster
-private volatileboolean activeMaster
+private volatileboolean activeMaster
 
 
 
@@ -1625,7 +1625,7 @@ implements 
 
 initialized
-private finalProcedureEvent initialized
+private finalProcedureEvent initialized
 
 
 
@@ -1634,7 +1634,7 @@ implements 
 
 serviceStarted
-volatileboolean serviceStarted
+volatileboolean serviceStarted
 
 
 
@@ -1643,7 +1643,7 @@ implements 
 
 serverCrashProcessingEnabled
-private finalProcedureEvent serverCrashProcessingEnabled
+private finalProcedureEvent serverCrashProcessingEnabled
 
 
 
@@ -1652,7 +1652,7 @@ implements 
 
 maxBlancingTime

[24/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSHLog.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSHLog.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSHLog.html
index 97b1727..560e87a 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSHLog.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/FSHLog.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class FSHLog
+public class FSHLog
extends AbstractFSWAL<WALProvider.Writer>
 The default implementation of FSWAL.
 
@@ -404,16 +404,16 @@ extends 
 private SyncFuture
-publishSyncOnRingBuffer(long sequence,
-                        org.apache.htrace.Span span)
+publishSyncOnRingBuffer(long sequence,
+                        org.apache.htrace.core.Span span)
 
 
 private SyncFuture
-publishSyncOnRingBuffer(org.apache.htrace.Span span)
+publishSyncOnRingBuffer(org.apache.htrace.core.Span span)
 
 
-private org.apache.htrace.Span
-publishSyncThenBlockOnCompletion(org.apache.htrace.Span span)
+private void
+publishSyncThenBlockOnCompletion(org.apache.htrace.core.TraceScope scope)
 
 
 private static void
@@ -442,7 +442,7 @@ extends AbstractFSWAL
-abortCacheFlush,
 append,
 atHeadOfRingBufferEventHandlerAppend,
 blockOnSync,
 close,
 completeCacheFlush,
 c
 omputeFilename, findRegionsToForceFlush,
 getCoprocessorHost,
 getCurrentFileName,
 getEarliestMemStoreSeqNum,
 getEarliestMemStoreSeqNum,
 getFilenum,
 
 getFileNumFromFileName, getFiles,
 getLogFileSize,
 getLogFileSizeIfBeingWritten,
 getNumLogFiles,
 getNumRolledLogFiles,
 getOldPath,
 getPreallocatedEventCount,
 getSyncFuture,
 getUnflushedEntriesCount,
 getWALArchivePath,
 isUnflushedEntries,
 postSync,
 registerWALActionsListener,
 replaceWriter,
 requestLogRoll,
 requestLogRoll,
 rollWriter,
 rollWriter,
 shutdown,
 stampSequenceIdAn
 dPublishToRingBuffer, startCacheFlush,
 startCacheFlush,
 toString,
 unregisterWALActionsListener,
 updateStore
+abortCacheFlush,
 append,
 atHeadOfRingBufferEventHandlerAppend,
 blockOnSync,
 close,
 completeCacheFlush,
 c
 omputeFilename, findRegionsToForceFlush,
 getCoprocessorHost,
 getCurrentFileName,
 getEarliestMemStoreSeqNum,
 getEarliestMemStoreSeqNum,
 getFilenum,
 
 getFileNumFromFileName, getFiles,
 getLogFileSize,
 getLogFileSizeIfBeingWritten,
 getNumLogFiles,
 getNumRolledLogFiles,
 getOldPath,
 getPreallocatedEventCount,
 getSyncFuture,
 getUnflushedEntriesCount,
 getWALArchivePath,
 isUnflushedEntries,
 postSync,
 registerWALActionsListener,
 replaceWriter,
 requestLogRoll,
 requestLogRoll,
 rollWriter,
 rollWriter,
 shutdown,
 stampSequenc
 eIdAndPublishToRingBuffer, startCacheFlush,
 startCacheFlush,
 toString,
 unregisterWALActionsListener,
 updateStore
 
 
 
@@ -471,7 +471,7 @@ extends 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -480,7 +480,7 @@ extends 
 
 disruptor
-private final com.lmax.disruptor.dsl.Disruptor<RingBufferTruck> disruptor
+private final com.lmax.disruptor.dsl.Disruptor<RingBufferTruck> disruptor
 The nexus at which all incoming handlers meet. Does appends and sync with an ordering. Appends
 and syncs are each put on the ring which means handlers need to smash up against the ring twice
 (can we make it once only? ... maybe not since time to append is so different from time to sync
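As a side note, the field description refers to the LMAX Disruptor ring-buffer pattern. A minimal, generic sketch of that pattern (stand-in Truck event type, not the real RingBufferTruck wiring in FSHLog):

import java.util.concurrent.Executors;

import com.lmax.disruptor.RingBuffer;
import com.lmax.disruptor.dsl.Disruptor;

public class RingSketch {
  static final class Truck { long payload; }   // stand-in for RingBufferTruck

  public static void main(String[] args) {
    Disruptor<Truck> disruptor =
        new Disruptor<>(Truck::new, 1024, Executors.defaultThreadFactory());
    disruptor.handleEventsWith((truck, sequence, endOfBatch) -> {
      // a single handler consumes trucks in ring order; appends/syncs would be handled here
    });
    RingBuffer<Truck> ring = disruptor.start();

    long seq = ring.next();          // claim a slot on the ring
    try {
      ring.get(seq).payload = 42L;   // load the truck
    } finally {
      ring.publish(seq);             // hand it to the handler thread
    }
    disruptor.shutdown();
  }
}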
@@ -494,7 +494,7 @@ extends 
 
 ringBufferEventHandler
-private final FSHLog.RingBufferEventHandler ringBufferEventHandler
+private final FSHLog.RingBufferEventHandler ringBufferEventHandler
 This fellow is run by the above appendExecutor service but it is all about batching up appends
 and syncs; it may shutdown without cleaning out the last few appends or syncs. To guard against
 this, keep a reference to this handler and do explicit close on way out to make sure all
@@ -507,7 +507,7 @@ extends 
 
 hdfs_out
-private org.apache.hadoop.fs.FSDataOutputStream hdfs_out
+private org.apache.hadoop.fs.FSDataOutputStream hdfs_out
 FSDataOutputStream associated with the current SequenceFile.writer
 
 
@@ -517,7 +517,7 @@ extends 
 
 minTolerableReplication
-private final int minTolerableReplication
+private final int minTolerableReplication
 
 
 
@@ -526,7 +526,7 @@ extends 
 
 consecutiveLogRolls
-private final AtomicInteger consecutiveLogRolls
+private final AtomicInteger consecutiveLogRolls
 
 
 
@@ -535,7 +535,7 @@ extends 
 
 lowReplicationRollLimit
-private final int lowReplicationRollLimit
+private final int lowReplicationRollLimit

[34/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
index 7a9732d..2f8fff2 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class HRegion.WriteState
+static class HRegion.WriteState
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -239,7 +239,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 flushing
-volatileboolean flushing
+volatileboolean flushing
 
 
 
@@ -248,7 +248,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 flushRequested
-volatileboolean flushRequested
+volatileboolean flushRequested
 
 
 
@@ -257,7 +257,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 compacting
-http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger compacting
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger compacting
 
 
 
@@ -266,7 +266,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 writesEnabled
-volatileboolean writesEnabled
+volatileboolean writesEnabled
 
 
 
@@ -275,7 +275,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 readOnly
-volatileboolean readOnly
+volatileboolean readOnly
 
 
 
@@ -284,7 +284,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 readsEnabled
-volatileboolean readsEnabled
+volatileboolean readsEnabled
 
 
 
@@ -293,7 +293,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 HEAP_SIZE
-static finallong HEAP_SIZE
+static finallong HEAP_SIZE
 
 
 
@@ -310,7 +310,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 WriteState
-WriteState()
+WriteState()
 
 
 
@@ -327,7 +327,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 setReadOnly
-voidsetReadOnly(booleanonOff)
+voidsetReadOnly(booleanonOff)
 Set flags that make this region read-only.
 
 Parameters:
@@ -341,7 +341,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 isReadOnly
-booleanisReadOnly()
+booleanisReadOnly()
 
 
 
@@ -350,7 +350,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 isFlushRequested
-booleanisFlushRequested()
+booleanisFlushRequested()
 
 
 
@@ -359,7 +359,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 setReadsEnabled
-voidsetReadsEnabled(booleanreadsEnabled)
+voidsetReadsEnabled(booleanreadsEnabled)
 
 
 



[07/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.html
index 008b7d0..942938b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.html
@@ -32,557 +32,557 @@
 024import static 
org.apache.hadoop.hbase.ipc.IPCUtil.setCancelled;
 025import static 
org.apache.hadoop.hbase.ipc.IPCUtil.write;
 026
-027import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-028import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder;
-029import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-030
-031import java.io.BufferedInputStream;
-032import java.io.BufferedOutputStream;
-033import java.io.DataInputStream;
-034import java.io.DataOutputStream;
-035import java.io.IOException;
-036import java.io.InputStream;
-037import java.io.InterruptedIOException;
-038import java.io.OutputStream;
-039import java.net.Socket;
-040import java.net.SocketTimeoutException;
-041import java.net.UnknownHostException;
-042import java.nio.ByteBuffer;
-043import 
java.security.PrivilegedExceptionAction;
-044import java.util.ArrayDeque;
-045import java.util.Locale;
-046import java.util.Queue;
-047import 
java.util.concurrent.ConcurrentHashMap;
-048import 
java.util.concurrent.ConcurrentMap;
-049import 
java.util.concurrent.ThreadLocalRandom;
-050
-051import 
javax.security.sasl.SaslException;
-052
-053import org.apache.commons.logging.Log;
-054import 
org.apache.commons.logging.LogFactory;
-055import 
org.apache.hadoop.conf.Configuration;
-056import 
org.apache.hadoop.hbase.CellScanner;
-057import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-058import 
org.apache.yetus.audience.InterfaceAudience;
-059import 
org.apache.hadoop.hbase.exceptions.ConnectionClosingException;
-060import 
org.apache.hadoop.hbase.io.ByteArrayOutputStream;
-061import 
org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback;
-062import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader;
-069import 
org.apache.hadoop.hbase.security.HBaseSaslRpcClient;
-070import 
org.apache.hadoop.hbase.security.SaslUtil;
-071import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-072import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-073import 
org.apache.hadoop.hbase.util.ExceptionUtil;
-074import org.apache.hadoop.io.IOUtils;
-075import 
org.apache.hadoop.ipc.RemoteException;
-076import org.apache.hadoop.net.NetUtils;
-077import 
org.apache.hadoop.security.UserGroupInformation;
-078import org.apache.htrace.Trace;
-079import org.apache.htrace.TraceScope;
-080
-081/**
-082 * Thread that reads responses and 
notifies callers. Each connection owns a socket connected to a
-083 * remote address. Calls are multiplexed 
through this socket: responses may be delivered out of
-084 * order.
-085 */
-086@InterfaceAudience.Private
-087class BlockingRpcConnection extends 
RpcConnection implements Runnable {
-088
-089  private static final Log LOG = 
LogFactory.getLog(BlockingRpcConnection.class);
-090
-091  private final BlockingRpcClient 
rpcClient;
-092
-093  private final String threadName;
-094  
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = 
"IS2_INCONSISTENT_SYNC",
-095  justification = "We are always 
under lock actually")
-096  private Thread thread;
-097
-098  // connected socket. protected for 
writing UT.
-099  protected Socket socket = null;
-100  private DataInputStream in;
-101  private DataOutputStream out;
-102
-103  private HBaseSaslRpcClient 
saslRpcClient;
-104
-105  // currently active calls
-106  private final ConcurrentMapInteger, 
Call calls = new ConcurrentHashMap();
-107
-108  private final CallSender callSender;
-109
-110  private boolean closed = false;
-111
-112  private byte[] 
connectionHeaderPreamble;
-113
-114  private byte[] 
connectionHeaderWithLength;
-115
-116  private boolean 
waitingConnectionHeaderResponse = false;
-117
-118  /**
-119   * If the client wants to interrupt its 
calls easily (i.e. call Thread#interrupt), it gets into a
-120   * java issue: an interruption during a 
write closes the socket/channel. A way to avoid this 

[21/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
index 3fa16c1..d3a1e91 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
@@ -56,1228 +56,1235 @@
 048import 
org.apache.hadoop.hbase.RetryImmediatelyException;
 049import 
org.apache.hadoop.hbase.ServerName;
 050import 
org.apache.hadoop.hbase.TableName;
-051import 
org.apache.yetus.audience.InterfaceAudience;
-052import 
org.apache.hadoop.hbase.client.backoff.ServerStatistics;
-053import 
org.apache.hadoop.hbase.client.coprocessor.Batch;
-054import 
org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil;
-055import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-056import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-057import 
org.apache.hadoop.hbase.util.Bytes;
-058import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-059import org.apache.htrace.Trace;
-060
-061/**
-062 * The context, and return value, for a single submit/submitAll call.
-063 * Note on how this class (one AP submit) works. Initially, all requests are split into groups
-064 * by server; request is sent to each server in parallel; the RPC calls are not async so a
-065 * thread per server is used. Every time some actions fail, regions/locations might have
-066 * changed, so we re-group them by server and region again and send these groups in parallel
-067 * too. The result, in case of retries, is a "tree" of threads, with parent exiting after
-068 * scheduling children. This is why lots of code doesn't require any synchronization.
-069 */
-070@InterfaceAudience.Private
-071class AsyncRequestFutureImpl<CResult> implements AsyncRequestFuture {
-072
-073  private static final Log LOG = LogFactory.getLog(AsyncRequestFutureImpl.class);
-074
-075  private RetryingTimeTracker tracker;
-076
-077  /**
-078   * Runnable (that can be submitted to thread pool) that waits for when it's time
-079   * to issue replica calls, finds region replicas, groups the requests by replica and
-080   * issues the calls (on separate threads, via sendMultiAction).
-081   * This is done on a separate thread because we don't want to wait on user thread for
-082   * our asynchronous call, and usually we have to wait before making replica calls.
-083   */
-084  private final class ReplicaCallIssuingRunnable implements Runnable {
-085    private final long startTime;
-086    private final List<Action> initialActions;
-087
-088    public ReplicaCallIssuingRunnable(List<Action> initialActions, long startTime) {
-089      this.initialActions = initialActions;
-090      this.startTime = startTime;
-091    }
-092
-093@Override
-094public void run() {
-095  boolean done = false;
-096  if 
(asyncProcess.primaryCallTimeoutMicroseconds  0) {
-097try {
-098  done = waitUntilDone(startTime 
* 1000L + asyncProcess.primaryCallTimeoutMicroseconds);
-099} catch (InterruptedException ex) 
{
-100  LOG.error("Replica thread was 
interrupted - no replica calls: " + ex.getMessage());
-101  return;
-102}
-103  }
-104  if (done) return; // Done within 
primary timeout
-105  MapServerName, MultiAction 
actionsByServer = new HashMap();
-106  ListAction 
unknownLocActions = new ArrayList();
-107  if (replicaGetIndices == null) {
-108for (int i = 0; i  
results.length; ++i) {
-109  addReplicaActions(i, 
actionsByServer, unknownLocActions);
-110}
-111  } else {
-112for (int replicaGetIndice : 
replicaGetIndices) {
-113  
addReplicaActions(replicaGetIndice, actionsByServer, unknownLocActions);
-114}
-115  }
-116  if (!actionsByServer.isEmpty()) {
-117sendMultiAction(actionsByServer, 
1, null, unknownLocActions.isEmpty());
-118  }
-119  if (!unknownLocActions.isEmpty()) 
{
-120actionsByServer = new 
HashMap();
-121for (Action action : 
unknownLocActions) {
-122  addReplicaActionsAgain(action, 
actionsByServer);
-123}
-124// Some actions may have 
completely failed, they are handled inside addAgain.
-125if (!actionsByServer.isEmpty()) 
{
-126  
sendMultiAction(actionsByServer, 1, null, true);
-127}
-128  }
-129}
-130
-131/**
-132 * Add replica actions to action map 
by server.
-133 * @param index Index of the original 
action.
-134 * @param actionsByServer The map by 

hbase-site git commit: INFRA-10751 Empty commit

2017-11-12 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site abb69192f -> cd9d57650


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/cd9d5765
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/cd9d5765
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/cd9d5765

Branch: refs/heads/asf-site
Commit: cd9d5765043447ba4dba56ace131b32958b45d55
Parents: abb6919
Author: jenkins 
Authored: Sun Nov 12 15:17:13 2017 +
Committer: jenkins 
Committed: Sun Nov 12 15:17:13 2017 +

--

--




[17/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
index 3fa16c1..d3a1e91 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
@@ -56,1228 +56,1235 @@
 048 import org.apache.hadoop.hbase.RetryImmediatelyException;
 049 import org.apache.hadoop.hbase.ServerName;
 050 import org.apache.hadoop.hbase.TableName;
-051 import org.apache.yetus.audience.InterfaceAudience;
-052 import org.apache.hadoop.hbase.client.backoff.ServerStatistics;
-053 import org.apache.hadoop.hbase.client.coprocessor.Batch;
-054 import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil;
-055 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-056 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-057 import org.apache.hadoop.hbase.util.Bytes;
-058 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-059 import org.apache.htrace.Trace;
-060
-061 /**
-062  * The context, and return value, for a single submit/submitAll call.
-063  * Note on how this class (one AP submit) works. Initially, all requests are split into groups
-064  * by server; request is sent to each server in parallel; the RPC calls are not async so a
-065  * thread per server is used. Every time some actions fail, regions/locations might have
-066  * changed, so we re-group them by server and region again and send these groups in parallel
-067  * too. The result, in case of retries, is a "tree" of threads, with parent exiting after
-068  * scheduling children. This is why lots of code doesn't require any synchronization.
-069  */
-070 @InterfaceAudience.Private
-071 class AsyncRequestFutureImpl<CResult> implements AsyncRequestFuture {
-072
-073   private static final Log LOG = LogFactory.getLog(AsyncRequestFutureImpl.class);
-074
-075   private RetryingTimeTracker tracker;
-076
-077   /**
-078    * Runnable (that can be submitted to thread pool) that waits for when it's time
-079    * to issue replica calls, finds region replicas, groups the requests by replica and
-080    * issues the calls (on separate threads, via sendMultiAction).
-081    * This is done on a separate thread because we don't want to wait on user thread for
-082    * our asynchronous call, and usually we have to wait before making replica calls.
-083    */
-084   private final class ReplicaCallIssuingRunnable implements Runnable {
-085     private final long startTime;
-086     private final List<Action> initialActions;
-087
-088     public ReplicaCallIssuingRunnable(List<Action> initialActions, long startTime) {
-089       this.initialActions = initialActions;
-090       this.startTime = startTime;
-091     }
-092
-093     @Override
-094     public void run() {
-095       boolean done = false;
-096       if (asyncProcess.primaryCallTimeoutMicroseconds > 0) {
-097         try {
-098           done = waitUntilDone(startTime * 1000L + asyncProcess.primaryCallTimeoutMicroseconds);
-099         } catch (InterruptedException ex) {
-100           LOG.error("Replica thread was interrupted - no replica calls: " + ex.getMessage());
-101           return;
-102         }
-103       }
-104       if (done) return; // Done within primary timeout
-105       Map<ServerName, MultiAction> actionsByServer = new HashMap<>();
-106       List<Action> unknownLocActions = new ArrayList<>();
-107       if (replicaGetIndices == null) {
-108         for (int i = 0; i < results.length; ++i) {
-109           addReplicaActions(i, actionsByServer, unknownLocActions);
-110         }
-111       } else {
-112         for (int replicaGetIndice : replicaGetIndices) {
-113           addReplicaActions(replicaGetIndice, actionsByServer, unknownLocActions);
-114         }
-115       }
-116       if (!actionsByServer.isEmpty()) {
-117         sendMultiAction(actionsByServer, 1, null, unknownLocActions.isEmpty());
-118       }
-119       if (!unknownLocActions.isEmpty()) {
-120         actionsByServer = new HashMap<>();
-121         for (Action action : unknownLocActions) {
-122           addReplicaActionsAgain(action, actionsByServer);
-123         }
-124         // Some actions may have completely failed, they are handled inside addAgain.
-125         if (!actionsByServer.isEmpty()) {
-126           sendMultiAction(actionsByServer, 1, null, true);
-127         }
-128       }
-129     }
-130
-131     /**
-132      * Add replica actions to action map by server.
-133      * @param index Index of the original action.
-134      * @param actionsByServer The map by server to add it to.
-135      */
-136     private void addReplicaActions(int index, Map<ServerName, MultiAction> actionsByServer,
-137   
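The class comment above sums up the submit model: actions are grouped by server, one task per server is dispatched in parallel, and anything that fails is re-grouped (locations may have moved) and dispatched again, so retries form a tree of threads. A minimal, self-contained sketch of that grouping-and-dispatch shape, using plain Java and placeholder names (sendToServer, locate, plain String keys) rather than HBase's real classes:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class GroupedDispatchSketch {

  // Placeholder RPC: returns the subset of actions that failed on this server.
  static List<String> sendToServer(String server, List<String> actions) {
    return Collections.emptyList();
  }

  // Placeholder location lookup used when regrouping failed actions.
  static String locate(String action) {
    return "server-2";
  }

  // One task per server; a task that sees failures regroups them, schedules children and
  // then exits, which is the "tree of threads" the class comment describes.
  static void dispatch(ExecutorService pool, Map<String, List<String>> actionsByServer) {
    for (Map.Entry<String, List<String>> entry : actionsByServer.entrySet()) {
      pool.execute(() -> {
        List<String> failed = sendToServer(entry.getKey(), entry.getValue());
        if (!failed.isEmpty()) {
          Map<String, List<String>> regrouped = new HashMap<>();
          for (String action : failed) {
            regrouped.computeIfAbsent(locate(action), k -> new ArrayList<>()).add(action);
          }
          dispatch(pool, regrouped);
        }
      });
    }
  }

  public static void main(String[] args) throws InterruptedException {
    ExecutorService pool = Executors.newCachedThreadPool();
    dispatch(pool, Map.of("server-1", List.of("put-a", "put-b")));
    pool.shutdown();
    pool.awaitTermination(5, TimeUnit.SECONDS);
  }
}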

[44/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
index 99ce39a..1a28624 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
@@ -942,7 +942,7 @@ through @@ -1169,7 +1169,7 @@
 [Anchor-only churn in the regenerated javadoc: every -/+ pair in these hunks carries identical
 visible text. The members covered are the constants MINOR_VERSION_WITH_CHECKSUM ("Minor versions
 in HFile starting with this number have hbase checksums"), MINOR_VERSION_NO_CHECKSUM,
 PBUF_TRAILER_MINOR_VERSION and KEY_VALUE_LEN_SIZE ("The size of a (key length, value length)
 tuple that prefixes each entry in a data block"), the fields includesMemstoreTS and
 decodeMemstoreTS, and the methods getPathOffsetEndStr(Path, long, long), checkFileVersion(),
 toStringFirstKey(), toStringLastKey(), toString(), length(), returnBlock(HFileBlock) and
 getFirstKey(); their signatures and descriptions are unchanged.]

[01/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 50a23c0f8 -> abb69192f


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.ConnectionManager.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.ConnectionManager.html b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.ConnectionManager.html
index 9d447ca..67e6eae 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.ConnectionManager.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleRpcServer.ConnectionManager.html
@@ -497,7 +497,7 @@
 489      Message param, CellScanner cellScanner, long receiveTime, MonitoredRPCHandler status,
 490      long startTime, int timeout) throws IOException {
 491    SimpleServerCall fakeCall = new SimpleServerCall(-1, service, md, null, param, cellScanner,
-492        null, -1, null, null, receiveTime, timeout, reservoir, cellBlockBuilder, null, null);
+492        null, -1, null, receiveTime, timeout, reservoir, cellBlockBuilder, null, null);
 493    return call(fakeCall, status);
 494  }
 495



[48/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index b12bfd0..bcb10d7 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -1818,6 +1818,10 @@
 
 addKeyToZK(AuthenticationKey)
 - Method in class org.apache.hadoop.hbase.security.token.ZKSecretWatcher
 
+addKVAnnotation(String, String) - Static method in class org.apache.hadoop.hbase.trace.TraceUtil
+
+Wrapper method to add key-value pair to TraceInfo of actual span
+
 addLabel(String)
 - Method in class org.apache.hadoop.hbase.rest.model.ScannerModel
 
 Add a visibility label to the scan
@@ -2039,6 +2043,10 @@
 
 addr
 - Variable in class org.apache.hadoop.hbase.ipc.ServerRpcConnection
 
+addReceiver(SpanReceiver) - Static method in class org.apache.hadoop.hbase.trace.TraceUtil
+
+Wrapper method to add receiver to actual tracerpool
+
 addReferenceFiles(SnapshotManifest.RegionVisitor,
 Object, Object, CollectionStoreFileInfo, boolean) - Method 
in class org.apache.hadoop.hbase.snapshot.SnapshotManifest
 
 addRegion(RegionStates.RegionStateNode)
 - Method in class org.apache.hadoop.hbase.master.assignment.RegionStates.ServerStateNode
@@ -2250,6 +2258,10 @@
 
 addRSGroup(RSGroupInfo)
 - Method in class org.apache.hadoop.hbase.rsgroup.RSGroupInfoManagerImpl
 
+addSampler(Sampler) - Static method in class org.apache.hadoop.hbase.trace.TraceUtil
+
+Wrapper method to add new sampler to the default tracer
+
 addScanner(String,
 RegionScanner, Shipper, HRegion, boolean) - Method in class 
org.apache.hadoop.hbase.regionserver.RSRpcServices
 
 addScanner(RegionScanner)
 - Method in class org.apache.hadoop.hbase.regionserver.RSRpcServices.RegionScannersCloseCallBack
@@ -2379,6 +2391,10 @@
 
 addTimeAnnotation(SyncFuture,
 String) - Method in class 
org.apache.hadoop.hbase.regionserver.wal.AsyncFSWAL
 
+addTimelineAnnotation(String) - Static method in class org.apache.hadoop.hbase.trace.TraceUtil
+
+Wrapper method to add timeline annotiation to current span with given message
+
 addTimer(String,
 Timer, MetricsRecordBuilder) - Method in class 
org.apache.hadoop.hbase.metrics.impl.HBaseMetrics2HadoopMetricsAdapter
 
 addToBackupSet(String,
 TableName[]) - Method in interface 
org.apache.hadoop.hbase.backup.BackupAdmin
@@ -4114,7 +4130,7 @@
 
 AtomicUtils()
 - Constructor for class org.apache.hadoop.hbase.util.AtomicUtils
 
-attachSpan(Span)
 - Method in class org.apache.hadoop.hbase.regionserver.wal.FSWALEntry
+attachSpan(Span)
 - Method in class org.apache.hadoop.hbase.regionserver.wal.FSWALEntry
 
 attainSafePoint()
 - Method in class org.apache.hadoop.hbase.regionserver.wal.FSHLog.RingBufferEventHandler
 
@@ -15744,6 +15760,8 @@
 
 conf 
- Variable in class org.apache.hadoop.hbase.trace.SpanReceiverHost
 
+conf - 
Static variable in class org.apache.hadoop.hbase.trace.TraceUtil
+
 conf 
- Variable in class org.apache.hadoop.hbase.util.AbstractHBaseTool
 
 conf - 
Variable in class org.apache.hadoop.hbase.util.ConnectionCache
@@ -18252,11 +18270,11 @@
 
 Creates the cache config.
 
-createCall(int, BlockingService, Descriptors.MethodDescriptor, RPCProtos.RequestHeader, Message, CellScanner, long, TraceInfo, InetAddress, int, RpcServer.CallCleanup) - Method in class org.apache.hadoop.hbase.ipc.NettyServerRpcConnection
+createCall(int, BlockingService, Descriptors.MethodDescriptor, RPCProtos.RequestHeader, Message, CellScanner, long, InetAddress, int, RpcServer.CallCleanup) - Method in class org.apache.hadoop.hbase.ipc.NettyServerRpcConnection

-createCall(int, BlockingService, Descriptors.MethodDescriptor, RPCProtos.RequestHeader, Message, CellScanner, long, TraceInfo, InetAddress, int, RpcServer.CallCleanup) - Method in class org.apache.hadoop.hbase.ipc.ServerRpcConnection
+createCall(int, BlockingService, Descriptors.MethodDescriptor, RPCProtos.RequestHeader, Message, CellScanner, long, InetAddress, int, RpcServer.CallCleanup) - Method in class org.apache.hadoop.hbase.ipc.ServerRpcConnection

-createCall(int, BlockingService, Descriptors.MethodDescriptor, RPCProtos.RequestHeader, Message, CellScanner, long, TraceInfo, InetAddress, int, RpcServer.CallCleanup) - Method in class org.apache.hadoop.hbase.ipc.SimpleServerRpcConnection
+createCall(int, BlockingService, Descriptors.MethodDescriptor, RPCProtos.RequestHeader, Message, CellScanner, long, InetAddress, int, RpcServer.CallCleanup) - Method in class org.apache.hadoop.hbase.ipc.SimpleServerRpcConnection
 
 createCallable(ServerName,
 TableName, MultiAction) - Method in class 
org.apache.hadoop.hbase.client.AsyncRequestFutureImpl
 
@@ -20110,6 +20128,15 @@
 
 createTopReference(byte[])
 - Static method in class org.apache.hadoop.hbase.io.Reference
 
+createTrace(String)
 - Static method in class org.apache.hadoop.hbase.trace.TraceUtil
+
+Wrapper method to create new TraceScope with the 
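The entries added above (addKVAnnotation, addReceiver, addSampler, addTimelineAnnotation, createTrace) describe TraceUtil as a set of static wrappers around the HTrace tracer. A hedged sketch of how calling code might use the scope and annotation wrappers; the TraceScope return type of createTrace and its behaviour when tracing is disabled are assumptions drawn only from the descriptions above:

import java.io.IOException;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.htrace.core.TraceScope;

public class TraceUtilUsageSketch {

  void tracedWork() throws IOException {
    // Assumed: createTrace(String) opens a TraceScope on the default tracer.
    try (TraceScope scope = TraceUtil.createTrace("example-operation")) {
      TraceUtil.addKVAnnotation("table", "t1");         // key-value pair on the current span
      doWork();
      TraceUtil.addTimelineAnnotation("work finished"); // timeline note on the current span
    }
  }

  void doWork() {
  }
}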

[42/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/ipc/RpcCall.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/RpcCall.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/RpcCall.html
index 8a7861a..0189da2 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/RpcCall.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/RpcCall.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.LimitedPrivate(value={"Coprocesssor","Phoenix"})
  @InterfaceStability.Evolving
-public interface RpcCall
+public interface RpcCall
 extends RpcCallContext
 Interface of all necessary to carry out a RPC method 
invocation on the server.
 
@@ -191,16 +191,12 @@ extends getTimeout()
 
 
-org.apache.htrace.TraceInfo
-getTraceInfo()
-
-
 void
 sendResponseIfReady()
 Send the response of this RPC call.
 
 
-
+
 void
 setResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.Messageparam,
CellScannercells,
@@ -209,13 +205,13 @@ extends Set the response resulting from this RPC call.
 
 
-
+
 void
 setStartTime(longstartTime)
 Set the time when the call starts to be executed.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toShortString()
 
@@ -247,7 +243,7 @@ extends 
 
 getService
-org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingServicegetService()
+org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingServicegetService()
 
 Returns:
 The service of this call.
@@ -260,7 +256,7 @@ extends 
 
 getMethod
-org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptorgetMethod()
+org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptorgetMethod()
 
 Returns:
 The service method.
@@ -273,7 +269,7 @@ extends 
 
 getParam
-org.apache.hadoop.hbase.shaded.com.google.protobuf.MessagegetParam()
+org.apache.hadoop.hbase.shaded.com.google.protobuf.MessagegetParam()
 
 Returns:
 The call parameter message.
@@ -286,7 +282,7 @@ extends 
 
 getCellScanner
-CellScannergetCellScanner()
+CellScannergetCellScanner()
 
 Returns:
 The CellScanner that can carry input and result payload.
@@ -299,7 +295,7 @@ extends 
 
 getReceiveTime
-longgetReceiveTime()
+longgetReceiveTime()
 
 Returns:
 The timestamp when the call is constructed.
@@ -312,7 +308,7 @@ extends 
 
 getStartTime
-longgetStartTime()
+longgetStartTime()
 
 Returns:
 The time when the call starts to be executed.
@@ -325,7 +321,7 @@ extends 
 
 setStartTime
-voidsetStartTime(longstartTime)
+voidsetStartTime(longstartTime)
 Set the time when the call starts to be executed.
 
 
@@ -335,7 +331,7 @@ extends 
 
 getTimeout
-intgetTimeout()
+intgetTimeout()
 
 Returns:
 The timeout of this call.
@@ -348,7 +344,7 @@ extends 
 
 getPriority
-intgetPriority()
+intgetPriority()
 
 Returns:
 The Priority of this call.
@@ -361,7 +357,7 @@ extends 
 
 getDeadline
-longgetDeadline()
+longgetDeadline()
 Return the deadline of this call. If we can not complete 
this call in time,
  we can throw a TimeoutIOException and RPCServer will drop it.
 
@@ -376,7 +372,7 @@ extends 
 
 getSize
-longgetSize()
+longgetSize()
 Used to calculate the request call queue size.
  If the total request call size exceeds a limit, the call will be 
rejected.
 
@@ -391,7 +387,7 @@ extends 
 
 getHeader
-org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeadergetHeader()
+org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeadergetHeader()
 
 Returns:
 The request header of this call.
@@ -404,7 +400,7 @@ extends 
 
 getRemotePort
-intgetRemotePort()
+intgetRemotePort()
 
 Returns:
 Port of remote address in this call
@@ -417,7 +413,7 @@ extends 
 
 setResponse
-voidsetResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.Messageparam,
+voidsetResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.Messageparam,
  CellScannercells,
  http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">ThrowableerrorThrowable,
  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringerror)
@@ -437,7 +433,7 @@ extends 
 
 sendResponseIfReady
-voidsendResponseIfReady()
+voidsendResponseIfReady()
   throws 
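Taken together, the methods above outline the lifecycle of one server-side call. A hedged sketch of how a handler thread might drive an RpcCall end to end, using only the methods listed on this page; the "dispatch" step is a placeholder that merely echoes the request message back, not HBase's actual service invocation:

import java.io.IOException;
import org.apache.hadoop.hbase.ipc.RpcCall;

public class RpcCallHandlerSketch {

  public void handle(RpcCall call) throws IOException {
    call.setStartTime(System.currentTimeMillis());
    try {
      // Placeholder dispatch: a real server invokes call.getMethod() on call.getService()
      // with call.getParam() and the optional call.getCellScanner() payload.
      call.setResponse(call.getParam(), call.getCellScanner(), null, null);
    } catch (RuntimeException e) {
      call.setResponse(null, null, e, e.getMessage());
    } finally {
      call.sendResponseIfReady(); // push the response back on the underlying connection
      call.cleanup();             // release call-scoped resources
    }
  }
}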

[16/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.QueueingFuture.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.QueueingFuture.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.QueueingFuture.html
index 2b8..4652557 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.QueueingFuture.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.QueueingFuture.html
@@ -36,9 +36,9 @@
 028
 029import org.apache.commons.logging.Log;
 030import 
org.apache.commons.logging.LogFactory;
-031import 
org.apache.yetus.audience.InterfaceAudience;
-032import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-033import org.apache.htrace.Trace;
+031import 
org.apache.hadoop.hbase.trace.TraceUtil;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 034
 035/**
 036 * A completion service for the 
RpcRetryingCallerFactory.
@@ -176,7 +176,7 @@
 168
 169  public void submit(RetryingCallable<V> task, int callTimeout, int id) {
 170    QueueingFuture<V> newFuture = new QueueingFuture(task, callTimeout, id);
-171    executor.execute(Trace.wrap(newFuture));
+171    executor.execute(TraceUtil.wrap(newFuture, "ResultBoundedCompletionService.submit"));
 172    tasks[id] = newFuture;
 173  }
 174

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.html
index 2b8..4652557 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.html
@@ -36,9 +36,9 @@
 028
 029import org.apache.commons.logging.Log;
 030import 
org.apache.commons.logging.LogFactory;
-031import 
org.apache.yetus.audience.InterfaceAudience;
-032import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-033import org.apache.htrace.Trace;
+031import 
org.apache.hadoop.hbase.trace.TraceUtil;
+032import 
org.apache.yetus.audience.InterfaceAudience;
+033import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 034
 035/**
 036 * A completion service for the 
RpcRetryingCallerFactory.
@@ -176,7 +176,7 @@
 168
 169  public void submit(RetryingCallable<V> task, int callTimeout, int id) {
 170    QueueingFuture<V> newFuture = new QueueingFuture(task, callTimeout, id);
-171    executor.execute(Trace.wrap(newFuture));
+171    executor.execute(TraceUtil.wrap(newFuture, "ResultBoundedCompletionService.submit"));
 172    tasks[id] = newFuture;
 173  }
 174
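The change above swaps Trace.wrap(runnable) for TraceUtil.wrap(runnable, description) before the task is handed to the executor, so the submitting thread's trace context travels with the queued work. A small sketch of the same pattern outside HBase's completion service; the two-argument wrap(Runnable, String) form is taken from the diff, and its behaviour when no trace is active is an assumption:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.hadoop.hbase.trace.TraceUtil;

public class TracedSubmitSketch {
  public static void main(String[] args) {
    ExecutorService executor = Executors.newSingleThreadExecutor();
    Runnable task = () -> System.out.println("doing traced work");
    executor.execute(TraceUtil.wrap(task, "TracedSubmitSketch.main"));
    executor.shutdown();
  }
}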

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/executor/EventHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/executor/EventHandler.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/executor/EventHandler.html
index 11bc137..abccb29 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/executor/EventHandler.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/executor/EventHandler.html
@@ -31,174 +31,172 @@
 023
 024import org.apache.commons.logging.Log;
 025import 
org.apache.commons.logging.LogFactory;
-026import 
org.apache.yetus.audience.InterfaceAudience;
-027import org.apache.hadoop.hbase.Server;
-028import org.apache.htrace.Span;
-029import org.apache.htrace.Trace;
-030import org.apache.htrace.TraceScope;
-031
-032/**
-033 * Abstract base class for all HBase 
event handlers. Subclasses should
-034 * implement the {@link #process()} and 
{@link #prepare()} methods.  Subclasses
-035 * should also do all necessary checks up 
in their prepare() if possible -- check
-036 * table exists, is disabled, etc. -- so 
they fail fast rather than later when process
-037 * is running.  Do it this way because 
process be invoked directly but event
-038 * handlers are also
-039 * run in an executor context -- i.e. 
asynchronously -- and in this case,
-040 * exceptions thrown at process time will 
not be seen by the invoker, not till
-041 * we implement a call-back mechanism so 
the client can pick them up later.
-042 * p
-043 * Event handlers have an {@link 
EventType}.
-044 * {@link EventType} is a list of ALL 
handler event types.  We need to keep
-045 * a full list in one place -- and as 
enums is a good shorthand for an
-046 * implemenations -- because event 
handlers can be 
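The EventHandler contract quoted above says subclasses should do their cheap fail-fast checks in prepare() and the real work in process(), because process() usually runs later on an executor where exceptions are no longer visible to the submitter. A hedged sketch of that contract written against a stand-in interface, so no real EventHandler signatures are assumed:

import java.io.IOException;

public class PrepareProcessSketch {

  interface Handler {
    Handler prepare() throws IOException; // fail fast here (table exists, not disabled, ...)
    void process() throws IOException;    // the real work, possibly run asynchronously
  }

  static final class ExampleHandler implements Handler {
    private final String tableName;

    ExampleHandler(String tableName) {
      this.tableName = tableName;
    }

    @Override
    public Handler prepare() throws IOException {
      if (tableName == null || tableName.isEmpty()) {
        throw new IOException("table name missing"); // surfaces to the submitter immediately
      }
      return this;
    }

    @Override
    public void process() throws IOException {
      System.out.println("processing event for " + tableName);
    }
  }

  public static void main(String[] args) throws IOException {
    new ExampleHandler("t1").prepare().process();
  }
}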

[36/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
index 2db218d..fbcfbd3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class HRegion.FlushResultImpl
+public static class HRegion.FlushResultImpl
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements HRegion.FlushResult
 Objects from this class are created when flushing to 
describe all the different states that
@@ -273,7 +273,7 @@ implements 
 
 result
-finalHRegion.FlushResult.Result result
+finalHRegion.FlushResult.Result result
 
 
 
@@ -282,7 +282,7 @@ implements 
 
 failureReason
-finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String failureReason
+finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String failureReason
 
 
 
@@ -291,7 +291,7 @@ implements 
 
 flushSequenceId
-finallong flushSequenceId
+finallong flushSequenceId
 
 
 
@@ -300,7 +300,7 @@ implements 
 
 wroteFlushWalMarker
-finalboolean wroteFlushWalMarker
+finalboolean wroteFlushWalMarker
 
 
 
@@ -317,7 +317,7 @@ implements 
 
 FlushResultImpl
-FlushResultImpl(HRegion.FlushResult.Resultresult,
+FlushResultImpl(HRegion.FlushResult.Resultresult,
 longflushSequenceId)
 Convenience constructor to use when the flush is 
successful, the failure message is set to
  null.
@@ -335,7 +335,7 @@ implements 
 
 FlushResultImpl
-FlushResultImpl(HRegion.FlushResult.Resultresult,
+FlushResultImpl(HRegion.FlushResult.Resultresult,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfailureReason,
 booleanwroteFlushMarker)
 Convenience constructor to use when we cannot flush.
@@ -352,7 +352,7 @@ implements 
 
 FlushResultImpl
-FlushResultImpl(HRegion.FlushResult.Resultresult,
+FlushResultImpl(HRegion.FlushResult.Resultresult,
 longflushSequenceId,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfailureReason,
 booleanwroteFlushMarker)
@@ -379,7 +379,7 @@ implements 
 
 isFlushSucceeded
-publicbooleanisFlushSucceeded()
+publicbooleanisFlushSucceeded()
 Convenience method, the equivalent of checking if result is
  FLUSHED_NO_COMPACTION_NEEDED or FLUSHED_NO_COMPACTION_NEEDED.
 
@@ -396,7 +396,7 @@ implements 
 
 isCompactionNeeded
-publicbooleanisCompactionNeeded()
+publicbooleanisCompactionNeeded()
 Convenience method, the equivalent of checking if result is 
FLUSHED_COMPACTION_NEEDED.
 
 Specified by:
@@ -412,7 +412,7 @@ implements 
 
 toString
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
+publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
 
 Overrides:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
 title="class or interface in java.lang">toStringin 
classhttp://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
@@ -425,7 +425,7 @@ implements 
 
 getResult
-publicHRegion.FlushResult.ResultgetResult()
+publicHRegion.FlushResult.ResultgetResult()
 
 Specified by:
 getResultin
 interfaceHRegion.FlushResult

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
index 38812f3..00eeb77 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class HRegion.MutationBatchOperation
+static class HRegion.MutationBatchOperation
 extends HRegion.BatchOperationMutation
 Batch of mutation operations. Base class is shared with 
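Earlier on this page, HRegion.FlushResult exposes getResult(), isFlushSucceeded() and isCompactionNeeded() for callers that need to react to a flush outcome. A hedged sketch of inspecting such a result, assuming the FlushResult interface is visible to the caller; how the FlushResult is obtained (the flush call itself) is deliberately left out:

import org.apache.hadoop.hbase.regionserver.HRegion;

public class FlushResultCheckSketch {

  static void report(HRegion.FlushResult result) {
    if (!result.isFlushSucceeded()) {
      System.out.println("flush did not write any files");
    } else if (result.isCompactionNeeded()) {
      System.out.println("flush succeeded and a compaction should be requested");
    } else {
      System.out.println("flush succeeded, no compaction needed");
    }
  }
}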

[27/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 76e6e91..f8f5877 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -705,17 +705,17 @@
 
 org.apache.hadoop.hbase.regionserver.ScanType
 org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
-org.apache.hadoop.hbase.regionserver.Region.Operation
-org.apache.hadoop.hbase.regionserver.BloomType
-org.apache.hadoop.hbase.regionserver.HRegion.FlushResult.Result
 org.apache.hadoop.hbase.regionserver.MemStoreCompactionStrategy.Action
 org.apache.hadoop.hbase.regionserver.TimeRangeTracker.Type
 org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
-org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
-org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
-org.apache.hadoop.hbase.regionserver.FlushType
+org.apache.hadoop.hbase.regionserver.BloomType
 org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
+org.apache.hadoop.hbase.regionserver.HRegion.FlushResult.Result
+org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
+org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
 org.apache.hadoop.hbase.regionserver.CompactingMemStore.IndexType
+org.apache.hadoop.hbase.regionserver.Region.Operation
+org.apache.hadoop.hbase.regionserver.FlushType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index 313f199..9264ee0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -130,8 +130,8 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.querymatcher.StripeCompactionScanQueryMatcher.DropDeletesInOutput
 org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult
+org.apache.hadoop.hbase.regionserver.querymatcher.StripeCompactionScanQueryMatcher.DropDeletesInOutput
 org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.WalProps.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.WalProps.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.WalProps.html
index 455ddf2..9521fca 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.WalProps.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.WalProps.html
@@ -107,7 +107,7 @@
 
 
 
-private static final class AbstractFSWAL.WalProps
+private static final class AbstractFSWAL.WalProps
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -193,7 +193,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 encodedName2HighestSequenceId
-public finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long encodedName2HighestSequenceId
+public finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long encodedName2HighestSequenceId
 Map the encoded region name to the highest sequence id. 
Contain all the regions it has
  entries of
 
@@ -204,7 +204,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 logSize
-public 

[15/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.BlockIndexNotLoadedException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.BlockIndexNotLoadedException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.BlockIndexNotLoadedException.html
index a89df18..ea0bc8c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.BlockIndexNotLoadedException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.BlockIndexNotLoadedException.html
@@ -49,26 +49,26 @@
 041import 
org.apache.hadoop.hbase.ByteBufferKeyValue;
 042import 
org.apache.hadoop.hbase.SizeCachedKeyValue;
 043import 
org.apache.hadoop.hbase.SizeCachedNoTagsKeyValue;
-044import 
org.apache.yetus.audience.InterfaceAudience;
-045import 
org.apache.hadoop.hbase.fs.HFileSystem;
-046import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-047import 
org.apache.hadoop.hbase.io.compress.Compression;
-048import 
org.apache.hadoop.hbase.io.crypto.Cipher;
-049import 
org.apache.hadoop.hbase.io.crypto.Encryption;
-050import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
-051import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-052import 
org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
-053import 
org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
-054import 
org.apache.hadoop.hbase.nio.ByteBuff;
-055import 
org.apache.hadoop.hbase.regionserver.KeyValueScanner;
-056import 
org.apache.hadoop.hbase.security.EncryptionUtil;
-057import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-058import 
org.apache.hadoop.hbase.util.Bytes;
-059import 
org.apache.hadoop.hbase.util.IdLock;
-060import 
org.apache.hadoop.hbase.util.ObjectIntPair;
-061import 
org.apache.hadoop.io.WritableUtils;
-062import org.apache.htrace.Trace;
-063import org.apache.htrace.TraceScope;
+044import 
org.apache.hadoop.hbase.trace.TraceUtil;
+045import 
org.apache.yetus.audience.InterfaceAudience;
+046import 
org.apache.hadoop.hbase.fs.HFileSystem;
+047import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
+048import 
org.apache.hadoop.hbase.io.compress.Compression;
+049import 
org.apache.hadoop.hbase.io.crypto.Cipher;
+050import 
org.apache.hadoop.hbase.io.crypto.Encryption;
+051import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
+052import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+053import 
org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
+054import 
org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
+055import 
org.apache.hadoop.hbase.nio.ByteBuff;
+056import 
org.apache.hadoop.hbase.regionserver.KeyValueScanner;
+057import 
org.apache.hadoop.hbase.security.EncryptionUtil;
+058import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+059import 
org.apache.hadoop.hbase.util.Bytes;
+060import 
org.apache.hadoop.hbase.util.IdLock;
+061import 
org.apache.hadoop.hbase.util.ObjectIntPair;
+062import 
org.apache.hadoop.io.WritableUtils;
+063import 
org.apache.htrace.core.TraceScope;
 064
 065import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 066
@@ -263,1235 +263,1235 @@
 255// Prefetch file blocks upon open if 
requested
 256if (cacheConf.shouldPrefetchOnOpen()) 
{
 257  PrefetchExecutor.request(path, new 
Runnable() {
-258public void run() {
-259  long offset = 0;
-260  long end = 0;
-261  try {
-262end = 
getTrailer().getLoadOnOpenDataOffset();
-263if (LOG.isTraceEnabled()) {
-264  LOG.trace("Prefetch start " 
+ getPathOffsetEndStr(path, offset, end));
-265}
-266// TODO: Could we use block 
iterator in here? Would that get stuff into the cache?
-267HFileBlock prevBlock = 
null;
-268while (offset  end) {
-269  if (Thread.interrupted()) 
{
-270break;
-271  }
-272  // Perhaps we got our block 
from cache? Unlikely as this may be, if it happens, then
-273  // the 
internal-to-hfileblock thread local which holds the overread that gets the
-274  // next header, will not 
have happened...so, pass in the onDiskSize gotten from the
-275  // cached block. This 
'optimization' triggers extremely rarely I'd say.
-276  long onDiskSize = prevBlock 
!= null? prevBlock.getNextBlockOnDiskSize(): -1;
-277  HFileBlock block = 
readBlock(offset, onDiskSize, true, false, false, false,
-278  null, null);
-279  // Need not update the 
current block. Ideally here the readBlock won't find the
-280  // block in cache. We call 
this readBlock so that block data is read from FS and
-281  // cached in BC. So there 
is no reference count increment that happens 

[45/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
index 519b0f1..b5f9102 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static class HFileReaderImpl.HFileScannerImpl
+protected static class HFileReaderImpl.HFileScannerImpl
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements HFileScanner
 
@@ -503,7 +503,7 @@ implements 
 
 blockBuffer
-privateByteBuff blockBuffer
+privateByteBuff blockBuffer
 
 
 
@@ -512,7 +512,7 @@ implements 
 
 cacheBlocks
-protected finalboolean cacheBlocks
+protected finalboolean cacheBlocks
 
 
 
@@ -521,7 +521,7 @@ implements 
 
 pread
-protected finalboolean pread
+protected finalboolean pread
 
 
 
@@ -530,7 +530,7 @@ implements 
 
 isCompaction
-protected finalboolean isCompaction
+protected finalboolean isCompaction
 
 
 
@@ -539,7 +539,7 @@ implements 
 
 currKeyLen
-privateint currKeyLen
+privateint currKeyLen
 
 
 
@@ -548,7 +548,7 @@ implements 
 
 currValueLen
-privateint currValueLen
+privateint currValueLen
 
 
 
@@ -557,7 +557,7 @@ implements 
 
 currMemstoreTSLen
-privateint currMemstoreTSLen
+privateint currMemstoreTSLen
 
 
 
@@ -566,7 +566,7 @@ implements 
 
 currMemstoreTS
-privatelong currMemstoreTS
+privatelong currMemstoreTS
 
 
 
@@ -575,7 +575,7 @@ implements 
 
 blockFetches
-protectedhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger blockFetches
+protectedhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">AtomicInteger blockFetches
 
 
 
@@ -584,7 +584,7 @@ implements 
 
 reader
-protected finalHFile.Reader reader
+protected finalHFile.Reader reader
 
 
 
@@ -593,7 +593,7 @@ implements 
 
 currTagsLen
-privateint currTagsLen
+privateint currTagsLen
 
 
 
@@ -602,7 +602,7 @@ implements 
 
 bufBackedKeyOnlyKv
-privateByteBufferKeyOnlyKeyValue bufBackedKeyOnlyKv
+privateByteBufferKeyOnlyKeyValue bufBackedKeyOnlyKv
 
 
 
@@ -611,7 +611,7 @@ implements 
 
 pair
-finalObjectIntPairhttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer pair
+finalObjectIntPairhttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer pair
 
 
 
@@ -620,7 +620,7 @@ implements 
 
 nextIndexedKey
-protectedCell nextIndexedKey
+protectedCell nextIndexedKey
 The next indexed key is to keep track of the indexed key of 
the next data block.
  If the nextIndexedKey is HConstants.NO_NEXT_INDEXED_KEY, it means that the
  current data block is the last data block.
@@ -634,7 +634,7 @@ implements 
 
 curBlock
-protectedHFileBlock curBlock
+protectedHFileBlock curBlock
 
 
 
@@ -643,7 +643,7 @@ implements 
 
 prevBlocks
-protected finalhttp://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in java.util">ArrayListHFileBlock prevBlocks
+protected finalhttp://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in java.util">ArrayListHFileBlock prevBlocks
 
 
 
@@ -660,7 +660,7 @@ implements 
 
 HFileScannerImpl
-publicHFileScannerImpl(HFile.Readerreader,
+publicHFileScannerImpl(HFile.Readerreader,
 booleancacheBlocks,
 booleanpread,
 booleanisCompaction)
@@ -680,7 +680,7 @@ implements 
 
 updateCurrBlockRef
-voidupdateCurrBlockRef(HFileBlockblock)
+voidupdateCurrBlockRef(HFileBlockblock)
 
 
 
@@ -689,7 +689,7 @@ implements 
 
 reset
-voidreset()
+voidreset()
 
 
 
@@ -698,7 +698,7 @@ implements 
 
 returnBlockToCache
-privatevoidreturnBlockToCache(HFileBlockblock)
+privatevoidreturnBlockToCache(HFileBlockblock)
 
 
 
@@ -707,7 +707,7 @@ implements 
 
 returnBlocks
-privatevoidreturnBlocks(booleanreturnAll)
+privatevoidreturnBlocks(booleanreturnAll)
 
 
 
@@ -716,7 +716,7 @@ implements 
 
 isSeeked
-publicbooleanisSeeked()
+publicbooleanisSeeked()
 
 Specified by:
 isSeekedin
 interfaceHFileScanner
@@ -733,7 +733,7 @@ implements 
 
 toString
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
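The scanner implementation above is normally reached through the HFileScanner interface. A hedged sketch of a full scan over one HFile; the getScanner(cacheBlocks, pread) entry point is assumed from the constructor parameters listed above, and obtaining the HFile.Reader itself is left to the caller:

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;

public class HFileScanSketch {

  static long countCells(HFile.Reader reader) throws IOException {
    HFileScanner scanner = reader.getScanner(true /* cacheBlocks */, false /* pread */);
    long cells = 0;
    if (scanner.seekTo()) {          // position on the first cell; false if the file is empty
      do {
        Cell cell = scanner.getCell();
        if (cell != null) {
          cells++;
        }
      } while (scanner.next());      // advance until the last data block is exhausted
    }
    return cells;
  }
}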

[50/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 0b9b801..e4ace30 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
 [regenerated page metadata only]
 Apache HBase - Checkstyle Results

@@ -286,10 +286,10 @@
 Summary row: 3424 -> 3426 files, 0, 0, 21406 -> 21363 errors

@@ -702,7 +702,7 @@
 org/apache/hadoop/hbase/PerformanceEvaluation.java: errors 38 -> 33

@@ -2619,6980 +2619,6970 @@
 org/apache/hadoop/hbase/client/TestRpcControllerFactory.java (0, 0, 1 error) dropped from the per-file table.
 [The remainder of this very large hunk is alternating -/+ rows with identical visible content,
 produced by the table rows shifting up one position; the per-file counts themselves are
 unchanged. The tail of the table continues below.]
 org/apache/hadoop/hbase/codec/Codec.java
 0
 0
 1
-
+
 

[25/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.html
index bd09df9..d4b8c27 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Configuration")
-public class AsyncFSWAL
+public class AsyncFSWAL
 extends AbstractFSWALWALProvider.AsyncWriter
 An asynchronous implementation of FSWAL.
  
@@ -479,7 +479,7 @@ extends AbstractFSWAL
-abortCacheFlush,
 append,
 atHeadOfRingBufferEventHandlerAppend,
 blockOnSync,
 close,
 completeCacheFlush,
 c
 omputeFilename, findRegionsToForceFlush,
 getCoprocessorHost,
 getCurrentFileName,
 getEarliestMemStoreSeqNum,
 getEarliestMemStoreSeqNum,
 getFilenum,
 
 getFileNumFromFileName, getFiles,
 getLogFileSize,
 getLogFileSizeIfBeingWritten,
 getNumLogFiles,
 getNumRolledLogFiles,
 getOldPath,
 getPreallocatedEventCount,
 getSyncFuture,
 getUnflushedEntriesCount,
 getWALArchivePath,
 isUnflushedEntries,
 postSync,
 registerWALActionsListener,
 replaceWriter,
 requestLogRoll,
 requestLogRoll,
 rollWriter,
 rollWriter,
 shutdown,
 stampSequenceIdAn
 dPublishToRingBuffer, startCacheFlush,
 startCacheFlush,
 toString,
 unregisterWALActionsListener,
 updateStore
+abortCacheFlush,
 append,
 atHeadOfRingBufferEventHandlerAppend,
 blockOnSync,
 close,
 completeCacheFlush,
 c
 omputeFilename, findRegionsToForceFlush,
 getCoprocessorHost,
 getCurrentFileName,
 getEarliestMemStoreSeqNum,
 getEarliestMemStoreSeqNum,
 getFilenum,
 
 getFileNumFromFileName, getFiles,
 getLogFileSize,
 getLogFileSizeIfBeingWritten,
 getNumLogFiles,
 getNumRolledLogFiles,
 getOldPath,
 getPreallocatedEventCount,
 getSyncFuture,
 getUnflushedEntriesCount,
 getWALArchivePath,
 isUnflushedEntries,
 postSync,
 registerWALActionsListener,
 replaceWriter,
 requestLogRoll,
 requestLogRoll,
 rollWriter,
 rollWriter,
 shutdown,
 stampSequenc
 eIdAndPublishToRingBuffer, startCacheFlush,
 startCacheFlush,
 toString,
 unregisterWALActionsListener,
 updateStore
 
 
 
@@ -508,7 +508,7 @@ extends 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -517,7 +517,7 @@ extends 
 
 SEQ_COMPARATOR
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">ComparatorSyncFuture 
SEQ_COMPARATOR
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">ComparatorSyncFuture 
SEQ_COMPARATOR
 
 
 
@@ -526,7 +526,7 @@ extends 
 
 WAL_BATCH_SIZE
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String WAL_BATCH_SIZE
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String WAL_BATCH_SIZE
 
 See Also:
 Constant
 Field Values
@@ -539,7 +539,7 @@ extends 
 
 DEFAULT_WAL_BATCH_SIZE
-public static finallong DEFAULT_WAL_BATCH_SIZE
+public static finallong DEFAULT_WAL_BATCH_SIZE
 
 See Also:
 Constant
 Field Values
@@ -552,7 +552,7 @@ extends 
 
 ASYNC_WAL_CREATE_MAX_RETRIES
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String ASYNC_WAL_CREATE_MAX_RETRIES
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String ASYNC_WAL_CREATE_MAX_RETRIES
 
 See Also:
 Constant
 Field Values
@@ -565,7 +565,7 @@ extends 
 
 DEFAULT_ASYNC_WAL_CREATE_MAX_RETRIES
-public static finalint DEFAULT_ASYNC_WAL_CREATE_MAX_RETRIES
+public static finalint DEFAULT_ASYNC_WAL_CREATE_MAX_RETRIES
 
 See Also:
 Constant
 Field Values
@@ -578,7 +578,7 @@ extends 
 
 eventLoop
-private 
finalorg.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop eventLoop
+private 
finalorg.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop eventLoop
 
 
 
@@ -587,7 +587,7 @@ extends 
 
 channelClass
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.Channel channelClass
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.Channel channelClass
 
 
 
@@ -596,7 +596,7 @@ extends 
 
 consumeLock
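AsyncFSWAL is selected through the WAL provider configuration rather than instantiated directly. A minimal sketch; the property name "hbase.wal.provider" and the value "asyncfs" are assumptions, and the keys behind WAL_BATCH_SIZE and ASYNC_WAL_CREATE_MAX_RETRIES are not shown on this page, so they are not guessed here:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class AsyncWalConfigSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.wal.provider", "asyncfs"); // assumed key/value for choosing AsyncFSWAL
    System.out.println(conf.get("hbase.wal.provider"));
  }
}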

[05/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcCall.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcCall.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcCall.html
index 9290999..4165f7f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcCall.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcCall.html
@@ -38,115 +38,109 @@
 030import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
 031import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
 032import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
-033import org.apache.htrace.TraceInfo;
-034
-035/**
-036 * Interface of all necessary to carry 
out a RPC method invocation on the server.
-037 */
-038@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
 HBaseInterfaceAudience.PHOENIX})
-039@InterfaceStability.Evolving
-040public interface RpcCall extends 
RpcCallContext {
-041
-042  /**
-043   * @return The service of this call.
-044   */
-045  BlockingService getService();
-046
-047  /**
-048   * @return The service method.
-049   */
-050  MethodDescriptor getMethod();
-051
-052  /**
-053   * @return The call parameter 
message.
-054   */
-055  Message getParam();
-056
-057  /**
-058   * @return The CellScanner that can 
carry input and result payload.
-059   */
-060  CellScanner getCellScanner();
-061
-062  /**
-063   * @return The timestamp when the call 
is constructed.
-064   */
-065  long getReceiveTime();
-066
-067  /**
-068   * @return The time when the call 
starts to be executed.
-069   */
-070  long getStartTime();
-071
-072  /**
-073   * Set the time when the call starts to 
be executed.
-074   */
-075  void setStartTime(long startTime);
-076
-077  /**
-078   * @return The timeout of this call.
-079   */
-080  int getTimeout();
-081
-082  /**
-083   * @return The Priority of this call.
-084   */
-085  int getPriority();
-086
-087  /**
-088   * Return the deadline of this call. If 
we can not complete this call in time,
-089   * we can throw a TimeoutIOException 
and RPCServer will drop it.
-090   * @return The system timestamp of 
deadline.
-091   */
-092  long getDeadline();
-093
-094  /**
-095   * Used to calculate the request call 
queue size.
-096   * If the total request call size 
exceeds a limit, the call will be rejected.
-097   * @return The raw size of this call.
-098   */
-099  long getSize();
-100
-101  /**
-102   * @return The request header of this 
call.
-103   */
-104  RequestHeader getHeader();
-105
-106  /**
-107   * @return Port of remote address in 
this call
-108   */
-109  int getRemotePort();
-110
-111  /**
-112   * Set the response resulting from this 
RPC call.
-113   * @param param The result message as 
response.
-114   * @param cells The CellScanner that 
possibly carries the payload.
-115   * @param errorThrowable The error 
Throwable resulting from the call.
-116   * @param error Extra error message.
-117   */
-118  void setResponse(Message param, 
CellScanner cells, Throwable errorThrowable, String error);
-119
-120  /**
-121   * Send the response of this RPC 
call.
-122   * Implementation provides the 
underlying facility (connection, etc) to send.
-123   * @throws IOException
-124   */
-125  void sendResponseIfReady() throws 
IOException;
-126
-127  /**
-128   * Do the necessary cleanup after the 
call if needed.
-129   */
-130  void cleanup();
-131
-132  /**
-133   * @return A short string format of 
this call without possibly lengthy params
-134   */
-135  String toShortString();
-136
-137  /**
-138   * @return TraceInfo attached to this 
call.
-139   */
-140  TraceInfo getTraceInfo();
-141}
+033
+034/**
+035 * Interface of all necessary to carry 
out a RPC method invocation on the server.
+036 */
+037@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
 HBaseInterfaceAudience.PHOENIX})
+038@InterfaceStability.Evolving
+039public interface RpcCall extends 
RpcCallContext {
+040
+041  /**
+042   * @return The service of this call.
+043   */
+044  BlockingService getService();
+045
+046  /**
+047   * @return The service method.
+048   */
+049  MethodDescriptor getMethod();
+050
+051  /**
+052   * @return The call parameter 
message.
+053   */
+054  Message getParam();
+055
+056  /**
+057   * @return The CellScanner that can 
carry input and result payload.
+058   */
+059  CellScanner getCellScanner();
+060
+061  /**
+062   * @return The timestamp when the call 
is constructed.
+063   */
+064  long getReceiveTime();
+065
+066  /**
+067   * @return The time when the call 
starts to be executed.
+068   */
+069  long getStartTime();
+070
+071  /**
+072   * Set the time when the call starts to 
be executed.
+073   */
+074  void setStartTime(long startTime);
+075
+076  /**
+077   * @return The timeout of this call.
+078   */
+079  
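For orientation, the accessors above (getSize, getDeadline, toShortString, and the rest) are what server-side schedulers key off when queueing or shedding work. A minimal illustrative sketch, assuming only the methods declared in this interface; the CallAdmission helper and its threshold are hypothetical and not part of this commit:

    import java.io.IOException;
    import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
    import org.apache.hadoop.hbase.ipc.RpcCall;

    // Hypothetical helper built only on the RpcCall accessors documented above.
    final class CallAdmission {
      private final long maxQueuedBytes;

      CallAdmission(long maxQueuedBytes) {
        this.maxQueuedBytes = maxQueuedBytes;
      }

      // Reject when adding this call would push the queued request size over the
      // limit, mirroring the getSize() javadoc above.
      boolean wouldOverflow(RpcCall call, long queuedBytes) {
        return queuedBytes + call.getSize() > maxQueuedBytes;
      }

      // Per the getDeadline() javadoc, a call past its deadline may be dropped
      // with a TimeoutIOException.
      void checkDeadline(RpcCall call, long nowMillis) throws IOException {
        if (call.getDeadline() < nowMillis) {
          throw new TimeoutIOException("Dropping expired call: " + call.toShortString());
        }
      }
    }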

[35/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index e04d11d..0c95fe3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-class HRegion.RegionScannerImpl
+class HRegion.RegionScannerImpl
 extends Object
 implements RegionScanner, Shipper, RpcCallback
 RegionScannerImpl is used to combine scanners from multiple Stores (aka column families).
@@ -425,7 +425,7 @@ implements 
 
 storeHeap
-KeyValueHeap storeHeap
+KeyValueHeap storeHeap
 
 
 
@@ -434,7 +434,7 @@ implements 
 
 joinedHeap
-KeyValueHeap joinedHeap
+KeyValueHeap joinedHeap
 Heap of key-values that are not essential for the provided 
filters and are thus read
  on demand, if on-demand column family loading is enabled.
 
@@ -445,7 +445,7 @@ implements 
 
 joinedContinuationRow
-protected Cell joinedContinuationRow
+protected Cell joinedContinuationRow
 If the joined heap data gathering is interrupted due to 
scan limits, this will
  contain the row for which we are populating the values.
 
@@ -456,7 +456,7 @@ implements 
 
 filterClosed
-private boolean filterClosed
+private boolean filterClosed
 
 
 
@@ -465,7 +465,7 @@ implements 
 
 stopRow
-protected final byte[] stopRow
+protected final byte[] stopRow
 
 
 
@@ -474,7 +474,7 @@ implements 
 
 includeStopRow
-protected final boolean includeStopRow
+protected final boolean includeStopRow
 
 
 
@@ -483,7 +483,7 @@ implements 
 
 region
-protected final HRegion region
+protected final HRegion region
 
 
 
@@ -492,7 +492,7 @@ implements 
 
 comparator
-protected final CellComparator comparator
+protected final CellComparator comparator
 
 
 
@@ -501,7 +501,7 @@ implements 
 
 readPt
-private final long readPt
+private final long readPt
 
 
 
@@ -510,7 +510,7 @@ implements 
 
 maxResultSize
-private final long maxResultSize
+private final long maxResultSize
 
 
 
@@ -519,7 +519,7 @@ implements 
 
 defaultScannerContext
-private final ScannerContext defaultScannerContext
+private final ScannerContext defaultScannerContext
 
 
 
@@ -528,7 +528,7 @@ implements 
 
 filter
-private final FilterWrapper filter
+private final FilterWrapper filter
 
 
 
@@ -545,7 +545,7 @@ implements 
 
 RegionScannerImpl
-RegionScannerImpl(Scan scan,
+RegionScannerImpl(Scan scan,
                   List<KeyValueScanner> additionalScanners,
                   HRegion region)
            throws IOException
@@ -561,7 +561,7 @@ implements 
 
 RegionScannerImpl
-RegionScannerImpl(Scan scan,
+RegionScannerImpl(Scan scan,
                   List<KeyValueScanner> additionalScanners,
                   HRegion region,
                   long nonceGroup,
@@ -587,7 +587,7 @@ implements 
 
 getRegionInfo
-public RegionInfo getRegionInfo()
+public RegionInfo getRegionInfo()
 
 Specified by:
 getRegionInfo in interface RegionScanner
@@ -602,7 +602,7 @@ implements 
 
 initializeScanners
-protected void initializeScanners(Scan scan,
+protected void initializeScanners(Scan scan,
                                   List<KeyValueScanner> additionalScanners)
                            throws IOException
 
@@ -617,7 +617,7 @@ implements 
 
 initializeKVHeap
-protected void initializeKVHeap(List<KeyValueScanner> scanners,
+protected void initializeKVHeap(List<KeyValueScanner> scanners,
                                 List<KeyValueScanner> joinedScanners,
                                 HRegion region)
                          throws 

[08/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.CallSender.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.CallSender.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.CallSender.html
index 008b7d0..942938b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.CallSender.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.CallSender.html
@@ -32,557 +32,557 @@
 024import static 
org.apache.hadoop.hbase.ipc.IPCUtil.setCancelled;
 025import static 
org.apache.hadoop.hbase.ipc.IPCUtil.write;
 026
-027import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-028import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder;
-029import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-030
-031import java.io.BufferedInputStream;
-032import java.io.BufferedOutputStream;
-033import java.io.DataInputStream;
-034import java.io.DataOutputStream;
-035import java.io.IOException;
-036import java.io.InputStream;
-037import java.io.InterruptedIOException;
-038import java.io.OutputStream;
-039import java.net.Socket;
-040import java.net.SocketTimeoutException;
-041import java.net.UnknownHostException;
-042import java.nio.ByteBuffer;
-043import 
java.security.PrivilegedExceptionAction;
-044import java.util.ArrayDeque;
-045import java.util.Locale;
-046import java.util.Queue;
-047import 
java.util.concurrent.ConcurrentHashMap;
-048import 
java.util.concurrent.ConcurrentMap;
-049import 
java.util.concurrent.ThreadLocalRandom;
-050
-051import 
javax.security.sasl.SaslException;
-052
-053import org.apache.commons.logging.Log;
-054import 
org.apache.commons.logging.LogFactory;
-055import 
org.apache.hadoop.conf.Configuration;
-056import 
org.apache.hadoop.hbase.CellScanner;
-057import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-058import 
org.apache.yetus.audience.InterfaceAudience;
-059import 
org.apache.hadoop.hbase.exceptions.ConnectionClosingException;
-060import 
org.apache.hadoop.hbase.io.ByteArrayOutputStream;
-061import 
org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback;
-062import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
-066import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader;
-069import 
org.apache.hadoop.hbase.security.HBaseSaslRpcClient;
-070import 
org.apache.hadoop.hbase.security.SaslUtil;
-071import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-072import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-073import 
org.apache.hadoop.hbase.util.ExceptionUtil;
-074import org.apache.hadoop.io.IOUtils;
-075import 
org.apache.hadoop.ipc.RemoteException;
-076import org.apache.hadoop.net.NetUtils;
-077import 
org.apache.hadoop.security.UserGroupInformation;
-078import org.apache.htrace.Trace;
-079import org.apache.htrace.TraceScope;
-080
-081/**
-082 * Thread that reads responses and 
notifies callers. Each connection owns a socket connected to a
-083 * remote address. Calls are multiplexed 
through this socket: responses may be delivered out of
-084 * order.
-085 */
-086@InterfaceAudience.Private
-087class BlockingRpcConnection extends 
RpcConnection implements Runnable {
-088
-089  private static final Log LOG = 
LogFactory.getLog(BlockingRpcConnection.class);
-090
-091  private final BlockingRpcClient 
rpcClient;
-092
-093  private final String threadName;
-094  
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = 
"IS2_INCONSISTENT_SYNC",
-095  justification = "We are always 
under lock actually")
-096  private Thread thread;
-097
-098  // connected socket. protected for 
writing UT.
-099  protected Socket socket = null;
-100  private DataInputStream in;
-101  private DataOutputStream out;
-102
-103  private HBaseSaslRpcClient 
saslRpcClient;
-104
-105  // currently active calls
-106  private final ConcurrentMap<Integer, Call> calls = new ConcurrentHashMap<>();
-107
-108  private final CallSender callSender;
-109
-110  private boolean closed = false;
-111
-112  private byte[] 
connectionHeaderPreamble;
-113
-114  private byte[] 
connectionHeaderWithLength;
-115
-116  private boolean 
waitingConnectionHeaderResponse = false;
-117
-118  /**
-119   * If the client wants to interrupt its 
calls easily (i.e. call Thread#interrupt), it gets into a
-120   * java issue: an interruption 
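The class comment above captures the key property of this connection: many outstanding calls share one socket, and responses may come back in any order. A small self-contained sketch of that multiplexing pattern; the Demux class is illustrative only and not an HBase type:

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    // Illustrative demultiplexer: pending calls are tracked by id so a single
    // reader thread can complete them in whatever order responses arrive.
    final class Demux {
      private final ConcurrentMap<Integer, CompletableFuture<byte[]>> pending =
          new ConcurrentHashMap<>();

      CompletableFuture<byte[]> register(int callId) {
        CompletableFuture<byte[]> future = new CompletableFuture<>();
        pending.put(callId, future);
        return future;
      }

      // Called from the reader loop for each response it decodes.
      void complete(int callId, byte[] responseBody) {
        CompletableFuture<byte[]> future = pending.remove(callId);
        if (future != null) {
          future.complete(responseBody);
        }
      }

      // On socket failure every outstanding call gets the same exception.
      void failAll(Throwable cause) {
        pending.values().forEach(f -> f.completeExceptionally(cause));
        pending.clear();
      }
    }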

[49/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 18107de..16a4879 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2017 The Apache Software Foundation
 
-  File: 3424,
- Errors: 21406,
+  File: 3426,
+ Errors: 21363,
  Warnings: 0,
  Infos: 0
   
@@ -4479,7 +4479,7 @@ under the License.
   0
 
 
-  38
+  33
 
   
   
@@ -4801,7 +4801,7 @@ under the License.
   0
 
 
-  34
+  24
 
   
   
@@ -5599,7 +5599,7 @@ under the License.
   0
 
 
-  3
+  4
 
   
   
@@ -7237,7 +7237,7 @@ under the License.
   0
 
 
-  3
+  5
 
   
   
@@ -9295,7 +9295,7 @@ under the License.
   0
 
 
-  17
+  1
 
   
   
@@ -10275,7 +10275,7 @@ under the License.
   0
 
 
-  2
+  1
 
   
   
@@ -11003,7 +11003,7 @@ under the License.
   0
 
 
-  31
+  30
 
   
   
@@ -11171,7 +11171,7 @@ under the License.
   0
 
 
-  14
+  13
 
   
   
@@ -12893,7 +12893,7 @@ under the License.
   0
 
 
-  2
+  3
 
   
   
@@ -13845,7 +13845,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -14461,7 +14461,7 @@ under the License.
   0
 
 
-  5
+  2
 
   
   
@@ -14956,6 +14956,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.trace.TraceTree.java;>org/apache/hadoop/hbase/trace/TraceTree.java
+
+
+  0
+
+
+  0
+
+
+  1
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.coprocessor.example.ValueRewritingObserver.java;>org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java
 
 
@@ -17513,7 +17527,7 @@ under the License.
   0
 
 
-  2
+  1
 
   
   
@@ -18694,6 +18708,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.trace.TraceUtil.java;>org/apache/hadoop/hbase/trace/TraceUtil.java
+
+
+  0
+
+
+  0
+
+
+  1
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.master.balancer.StochasticLoadBalancer.java;>org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java
 
 
@@ -20677,7 +20705,7 @@ under the License.
   0
 
 
-  11
+  7
 
   
   
@@ -24135,7 +24163,7 @@ under the License.
   0
 
 
-  4
+  3
 
   
   
@@ -29609,7 +29637,7 @@ under the License.
   0
 
 
-  12
+   

[29/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
index 381d6d1..86b632e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class RSRpcServices.RegionScannerShippedCallBack
+private class RSRpcServices.RegionScannerShippedCallBack
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements RpcCallback
 An Rpc callback for doing shipped() call on a 
RegionScanner.
@@ -219,7 +219,7 @@ implements 
 
 scannerName
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String scannerName
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String scannerName
 
 
 
@@ -228,7 +228,7 @@ implements 
 
 shipper
-private finalShipper shipper
+private finalShipper shipper
 
 
 
@@ -237,7 +237,7 @@ implements 
 
 lease
-private finalLeases.Lease lease
+private finalLeases.Lease lease
 
 
 
@@ -254,7 +254,7 @@ implements 
 
 RegionScannerShippedCallBack
-publicRegionScannerShippedCallBack(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringscannerName,
+publicRegionScannerShippedCallBack(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringscannerName,
 Shippershipper,
 Leases.Leaselease)
 
@@ -273,7 +273,7 @@ implements 
 
 run
-publicvoidrun()
+publicvoidrun()
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:RpcCallback
 Called at the end of an Rpc Call RpcCallContext

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
index ec997f6..497ecb2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class RSRpcServices.RegionScannersCloseCallBack
+static class RSRpcServices.RegionScannersCloseCallBack
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements RpcCallback
 An RpcCallBack that creates a list of scanners that needs 
to perform callBack operation on
@@ -214,7 +214,7 @@ implements 
 
 scanners
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionScanner 
scanners
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRegionScanner 
scanners
 
 
 
@@ -231,7 +231,7 @@ implements 
 
 RegionScannersCloseCallBack
-RegionScannersCloseCallBack()
+RegionScannersCloseCallBack()
 
 
 
@@ -248,7 +248,7 @@ implements 
 
 addScanner
-publicvoidaddScanner(RegionScannerscanner)
+publicvoidaddScanner(RegionScannerscanner)
 
 
 
@@ -257,7 +257,7 @@ implements 
 
 run
-publicvoidrun()
+publicvoidrun()
 Description copied from 
interface:RpcCallback
 Called at the end of an Rpc Call RpcCallContext
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
index 2d5755e..8d0df25 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
+++ 
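Both callbacks documented above share the same one-method contract: run() is invoked once the RPC call finishes. A hedged sketch of a callback in that style that closes a batch of scanners; the class below is illustrative and is not the RSRpcServices implementation:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hbase.ipc.RpcCallback;
    import org.apache.hadoop.hbase.regionserver.RegionScanner;

    // Illustrative callback: tracks scanners opened while serving a request and
    // releases them when the call completes.
    final class CloseScannersOnDone implements RpcCallback {
      private final List<RegionScanner> scanners = new ArrayList<>();

      void add(RegionScanner scanner) {
        scanners.add(scanner);
      }

      @Override
      public void run() throws IOException {
        // Invoked at the end of the RPC call, as described above.
        for (RegionScanner scanner : scanners) {
          scanner.close();
        }
      }
    }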

[40/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.html
index 3ebaa27..035be4e 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-class SimpleServerRpcConnection
+class SimpleServerRpcConnection
 extends ServerRpcConnection
 Reads calls from a connection and queues them for 
handling.
 
@@ -240,14 +240,13 @@ extends 
 SimpleServerCall
-createCall(int id,
+createCall(int id,
   org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService service,
   org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor md,
   org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
   org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
   CellScanner cellScanner,
   long size,
-  org.apache.htrace.TraceInfo tinfo,
   InetAddress remoteAddress,
   int timeout,
   RpcServer.CallCleanup reqCleanup)
@@ -339,7 +338,7 @@ extends 
 
 channel
-finalhttp://docs.oracle.com/javase/8/docs/api/java/nio/channels/SocketChannel.html?is-external=true;
 title="class or interface in java.nio.channels">SocketChannel channel
+finalhttp://docs.oracle.com/javase/8/docs/api/java/nio/channels/SocketChannel.html?is-external=true;
 title="class or interface in java.nio.channels">SocketChannel channel
 
 
 
@@ -348,7 +347,7 @@ extends 
 
 data
-privateByteBuff data
+privateByteBuff data
 
 
 
@@ -357,7 +356,7 @@ extends 
 
 dataLengthBuffer
-privatehttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer dataLengthBuffer
+privatehttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer dataLengthBuffer
 
 
 
@@ -366,7 +365,7 @@ extends 
 
 preambleBuffer
-privatehttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer preambleBuffer
+privatehttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer preambleBuffer
 
 
 
@@ -375,7 +374,7 @@ extends 
 
 rpcCount
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder rpcCount
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder rpcCount
 
 
 
@@ -384,7 +383,7 @@ extends 
 
 lastContact
-privatelong lastContact
+privatelong lastContact
 
 
 
@@ -393,7 +392,7 @@ extends 
 
 socket
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/net/Socket.html?is-external=true;
 title="class or interface in java.net">Socket socket
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/net/Socket.html?is-external=true;
 title="class or interface in java.net">Socket socket
 
 
 
@@ -402,7 +401,7 @@ extends 
 
 responder
-finalSimpleRpcServerResponder responder
+finalSimpleRpcServerResponder responder
 
 
 
@@ -411,7 +410,7 @@ extends 
 
 connectionPreambleRead
-privateboolean connectionPreambleRead
+privateboolean connectionPreambleRead
 
 
 
@@ -420,7 +419,7 @@ extends 
 
 responseQueue
-final ConcurrentLinkedDeque<RpcResponse> responseQueue
+final ConcurrentLinkedDeque<RpcResponse> responseQueue
 
 
 
@@ -429,7 +428,7 @@ extends 
 
 responseWriteLock
-final Lock responseWriteLock
+final Lock responseWriteLock
 
 
 
@@ -438,7 +437,7 @@ extends 
 
 lastSentTime
-long lastSentTime
+long lastSentTime
 
 
 
@@ -455,7 +454,7 @@ extends 
 
 SimpleServerRpcConnection
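The fields listed above (responseQueue, responseWriteLock, lastSentTime) suggest the flush pattern: any thread may enqueue a response, and whichever thread holds the write lock drains the deque to the socket. A generic sketch of that pattern using only the JDK; the ResponseFlusher name and shape are assumptions, not the HBase class:

    import java.util.concurrent.ConcurrentLinkedDeque;
    import java.util.concurrent.locks.Lock;
    import java.util.concurrent.locks.ReentrantLock;
    import java.util.function.Consumer;

    // Illustrative queue-and-drain helper: enqueue from anywhere, drain under the lock.
    final class ResponseFlusher<T> {
      private final ConcurrentLinkedDeque<T> responseQueue = new ConcurrentLinkedDeque<>();
      private final Lock responseWriteLock = new ReentrantLock();

      void enqueue(T response) {
        responseQueue.addLast(response);
      }

      void flushIfIdle(Consumer<T> writer) {
        if (!responseWriteLock.tryLock()) {
          return; // another thread is writing; it will also drain our response
        }
        try {
          T next;
          while ((next = responseQueue.pollFirst()) != null) {
            writer.accept(next);
          }
        } finally {
          responseWriteLock.unlock();
        }
      }
    }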

[10/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.HFileBlockTranscoder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.HFileBlockTranscoder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.HFileBlockTranscoder.html
index b9ac0db..eb74b5b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.HFileBlockTranscoder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.HFileBlockTranscoder.html
@@ -6,302 +6,303 @@
 
 
 
-001/**
-002 * Copyright The Apache Software 
Foundation
-003 *
-004 * Licensed to the Apache Software 
Foundation (ASF) under one or more
-005 * contributor license agreements. See 
the NOTICE file distributed with this
-006 * work for additional information 
regarding copyright ownership. The ASF
-007 * licenses this file to you under the 
Apache License, Version 2.0 (the
-008 * "License"); you may not use this file 
except in compliance with the License.
-009 * You may obtain a copy of the License 
at
-010 *
-011 * 
http://www.apache.org/licenses/LICENSE-2.0
-012 *
-013 * Unless required by applicable law or 
agreed to in writing, software
-014 * distributed under the License is 
distributed on an "AS IS" BASIS, WITHOUT
-015 * WARRANTIES OR CONDITIONS OF ANY KIND, 
either express or implied. See the
-016 * License for the specific language 
governing permissions and limitations
-017 * under the License.
-018 */
-019
-020package 
org.apache.hadoop.hbase.io.hfile;
-021
-022import net.spy.memcached.CachedData;
-023import 
net.spy.memcached.ConnectionFactoryBuilder;
-024import net.spy.memcached.FailureMode;
-025import 
net.spy.memcached.MemcachedClient;
-026import 
net.spy.memcached.transcoders.Transcoder;
-027import org.apache.commons.logging.Log;
-028import 
org.apache.commons.logging.LogFactory;
-029import 
org.apache.hadoop.conf.Configuration;
-030import 
org.apache.hadoop.hbase.HConstants;
-031import 
org.apache.yetus.audience.InterfaceAudience;
-032import 
org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
-033import 
org.apache.hadoop.hbase.nio.ByteBuff;
-034import 
org.apache.hadoop.hbase.nio.SingleByteBuff;
-035import 
org.apache.hadoop.hbase.util.Addressing;
-036import org.apache.htrace.Trace;
-037import org.apache.htrace.TraceScope;
-038
-039
-040import java.io.IOException;
-041import java.net.InetSocketAddress;
-042import java.nio.ByteBuffer;
-043import java.util.ArrayList;
-044import java.util.Iterator;
-045import java.util.List;
-046import 
java.util.NoSuchElementException;
-047import 
java.util.concurrent.ExecutionException;
-048
-049/**
-050 * Class to store blocks into 
memcached.
-051 * This should only be used on a cluster 
of Memcached daemons that are tuned well and have a
-052 * good network connection to the HBase 
regionservers. Any other use will likely slow down HBase
-053 * greatly.
-054 */
-055@InterfaceAudience.Private
-056public class MemcachedBlockCache 
implements BlockCache {
-057  private static final Log LOG = 
LogFactory.getLog(MemcachedBlockCache.class.getName());
-058
-059  // Some memcache versions won't take 
more than 1024 * 1024. So set the limit below
-060  // that just in case this client is 
used with those versions.
-061  public static final int MAX_SIZE = 1020 
* 1024;
-062
-063  // Config key for what memcached 
servers to use.
-064  // They should be specified in a comma 
sperated list with ports.
-065  // like:
-066  //
-067  // host1:11211,host3:8080,host4:11211
-068  public static final String 
MEMCACHED_CONFIG_KEY = "hbase.cache.memcached.servers";
-069  public static final String 
MEMCACHED_TIMEOUT_KEY = "hbase.cache.memcached.timeout";
-070  public static final String 
MEMCACHED_OPTIMEOUT_KEY = "hbase.cache.memcached.optimeout";
-071  public static final String 
MEMCACHED_OPTIMIZE_KEY = "hbase.cache.memcached.spy.optimze";
-072  public static final long 
MEMCACHED_DEFAULT_TIMEOUT = 500;
-073  public static final boolean 
MEMCACHED_OPTIMIZE_DEFAULT = false;
-074
-075  private final MemcachedClient client;
-076  private final HFileBlockTranscoder tc = 
new HFileBlockTranscoder();
-077  private final CacheStats cacheStats = 
new CacheStats("MemcachedBlockCache");
-078
-079  public 
MemcachedBlockCache(Configuration c) throws IOException {
-080LOG.info("Creating 
MemcachedBlockCache");
-081
-082long opTimeout = 
c.getLong(MEMCACHED_OPTIMEOUT_KEY, MEMCACHED_DEFAULT_TIMEOUT);
-083long queueTimeout = 
c.getLong(MEMCACHED_TIMEOUT_KEY, opTimeout + MEMCACHED_DEFAULT_TIMEOUT);
-084boolean optimize = 
c.getBoolean(MEMCACHED_OPTIMIZE_KEY, MEMCACHED_OPTIMIZE_DEFAULT);
-085
-086ConnectionFactoryBuilder builder = 
new ConnectionFactoryBuilder()
-087.setOpTimeout(opTimeout)
-088
.setOpQueueMaxBlockTime(queueTimeout) // Cap 
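The constructor shown above pulls everything it needs from the Configuration, using the keys declared earlier in this file. A hedged usage example; the server list is the one from the source comment above, and the timeout values are placeholders rather than recommendations:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache;

    public class MemcachedCacheExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        // Comma-separated host:port list, as described in the comment above.
        conf.set("hbase.cache.memcached.servers", "host1:11211,host3:8080,host4:11211");
        conf.setLong("hbase.cache.memcached.optimeout", 500L);  // per-operation timeout, ms
        conf.setLong("hbase.cache.memcached.timeout", 1000L);   // op queue block time, ms
        conf.setBoolean("hbase.cache.memcached.spy.optimze", false);

        MemcachedBlockCache cache = new MemcachedBlockCache(conf);
        System.out.println("Created block cache: " + cache);
      }
    }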

[18/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
index 3fa16c1..d3a1e91 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.SingleServerRequestRunnable.html
@@ -56,1228 +56,1235 @@
 048import 
org.apache.hadoop.hbase.RetryImmediatelyException;
 049import 
org.apache.hadoop.hbase.ServerName;
 050import 
org.apache.hadoop.hbase.TableName;
-051import 
org.apache.yetus.audience.InterfaceAudience;
-052import 
org.apache.hadoop.hbase.client.backoff.ServerStatistics;
-053import 
org.apache.hadoop.hbase.client.coprocessor.Batch;
-054import 
org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil;
-055import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-056import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-057import 
org.apache.hadoop.hbase.util.Bytes;
-058import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-059import org.apache.htrace.Trace;
-060
-061/**
-062 * The context, and return value, for a 
single submit/submitAll call.
-063 * Note on how this class (one AP submit) 
works. Initially, all requests are split into groups
-064 * by server; request is sent to each 
server in parallel; the RPC calls are not async so a
-065 * thread per server is used. Every time 
some actions fail, regions/locations might have
-066 * changed, so we re-group them by server 
and region again and send these groups in parallel
-067 * too. The result, in case of retries, 
is a "tree" of threads, with parent exiting after
-068 * scheduling children. This is why lots 
of code doesn't require any synchronization.
-069 */
-070@InterfaceAudience.Private
-071class AsyncRequestFutureImpl<CResult> implements AsyncRequestFuture {
-072
-073  private static final Log LOG = 
LogFactory.getLog(AsyncRequestFutureImpl.class);
-074
-075  private RetryingTimeTracker tracker;
-076
-077  /**
-078   * Runnable (that can be submitted to 
thread pool) that waits for when it's time
-079   * to issue replica calls, finds region 
replicas, groups the requests by replica and
-080   * issues the calls (on separate 
threads, via sendMultiAction).
-081   * This is done on a separate thread 
because we don't want to wait on user thread for
-082   * our asynchronous call, and usually 
we have to wait before making replica calls.
-083   */
-084  private final class 
ReplicaCallIssuingRunnable implements Runnable {
-085private final long startTime;
-086private final List<Action> initialActions;
-087
-088public ReplicaCallIssuingRunnable(List<Action> initialActions, long startTime) {
-089  this.initialActions = 
initialActions;
-090  this.startTime = startTime;
-091}
-092
-093@Override
-094public void run() {
-095  boolean done = false;
-096  if (asyncProcess.primaryCallTimeoutMicroseconds > 0) {
-097try {
-098  done = waitUntilDone(startTime 
* 1000L + asyncProcess.primaryCallTimeoutMicroseconds);
-099} catch (InterruptedException ex) 
{
-100  LOG.error("Replica thread was 
interrupted - no replica calls: " + ex.getMessage());
-101  return;
-102}
-103  }
-104  if (done) return; // Done within 
primary timeout
-105  Map<ServerName, MultiAction> actionsByServer = new HashMap<>();
-106  List<Action> unknownLocActions = new ArrayList<>();
-107  if (replicaGetIndices == null) {
-108for (int i = 0; i < results.length; ++i) {
-109  addReplicaActions(i, 
actionsByServer, unknownLocActions);
-110}
-111  } else {
-112for (int replicaGetIndice : 
replicaGetIndices) {
-113  
addReplicaActions(replicaGetIndice, actionsByServer, unknownLocActions);
-114}
-115  }
-116  if (!actionsByServer.isEmpty()) {
-117sendMultiAction(actionsByServer, 
1, null, unknownLocActions.isEmpty());
-118  }
-119  if (!unknownLocActions.isEmpty()) 
{
-120actionsByServer = new HashMap<>();
-121for (Action action : 
unknownLocActions) {
-122  addReplicaActionsAgain(action, 
actionsByServer);
-123}
-124// Some actions may have 
completely failed, they are handled inside addAgain.
-125if (!actionsByServer.isEmpty()) 
{
-126  
sendMultiAction(actionsByServer, 1, null, true);
-127}
-128  }
-129}
-130
-131/**
-132 * Add replica actions to action map 
by server.
-133 * @param index Index of the original 
action.
-134 * @param actionsByServer The 
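The javadoc above sums up the submit flow: actions are grouped by server, each group is sent on its own thread, and failed actions are re-grouped and resent because their regions may have moved. A compact, self-contained sketch of that grouping step, independent of the HBase client types; locate() and send() are stand-ins for region location lookup and the blocking multi RPC:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.concurrent.ExecutorService;
    import java.util.function.BiConsumer;
    import java.util.function.Function;

    // Illustrative "group by server, one task per server" step.
    final class GroupAndSend {
      static <A> void submit(List<A> actions,
                             Function<A, String> locate,
                             BiConsumer<String, List<A>> send,
                             ExecutorService pool) {
        Map<String, List<A>> byServer = new HashMap<>();
        for (A action : actions) {
          byServer.computeIfAbsent(locate.apply(action), s -> new ArrayList<>()).add(action);
        }
        // One task per server; a retry re-runs this method with only the failed actions.
        for (Map.Entry<String, List<A>> entry : byServer.entrySet()) {
          pool.submit(() -> send.accept(entry.getKey(), entry.getValue()));
        }
      }
    }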

[30/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html
index 05ce7b3..0ed5843 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html
@@ -124,7 +124,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class HRegionServerCommandLine
+public class HRegionServerCommandLine
 extends ServerCommandLine
 Class responsible for parsing the command line and starting 
the
  RegionServer.
@@ -253,7 +253,7 @@ extends 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -262,7 +262,7 @@ extends 
 
 regionServerClass
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends HRegionServer regionServerClass
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends HRegionServer regionServerClass
 
 
 
@@ -271,7 +271,7 @@ extends 
 
 USAGE
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String USAGE
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String USAGE
 
 See Also:
 Constant
 Field Values
@@ -292,7 +292,7 @@ extends 
 
 HRegionServerCommandLine
-publicHRegionServerCommandLine(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends HRegionServerclazz)
+publicHRegionServerCommandLine(http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true;
 title="class or interface in java.lang">Class? extends HRegionServerclazz)
 
 
 
@@ -309,7 +309,7 @@ extends 
 
 getUsage
-protectedhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetUsage()
+protectedhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetUsage()
 Description copied from 
class:ServerCommandLine
 Implementing subclasses should return a usage string to 
print out.
 
@@ -324,7 +324,7 @@ extends 
 
 start
-privateintstart()
+privateintstart()
throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -338,7 +338,7 @@ extends 
 
 run
-publicintrun(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]args)
+publicintrun(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]args)
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
index 504174d..9bc9e33 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushHandler.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class MemStoreFlusher.FlushHandler
+private class MemStoreFlusher.FlushHandler
 extends HasThread
 
 
@@ -200,7 +200,7 @@ extends 
 
 FlushHandler
-privateFlushHandler(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+privateFlushHandler(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 
 
 
@@ -217,7 +217,7 @@ extends 
 
 run
-publicvoidrun()
+publicvoidrun()
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--;
 title="class or interface in java.lang">runin 
interfacehttp://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable


[20/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
index 3fa16c1..d3a1e91 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html
@@ -56,1228 +56,1235 @@
 048import 
org.apache.hadoop.hbase.RetryImmediatelyException;
 049import 
org.apache.hadoop.hbase.ServerName;
 050import 
org.apache.hadoop.hbase.TableName;
-051import 
org.apache.yetus.audience.InterfaceAudience;
-052import 
org.apache.hadoop.hbase.client.backoff.ServerStatistics;
-053import 
org.apache.hadoop.hbase.client.coprocessor.Batch;
-054import 
org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil;
-055import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-056import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-057import 
org.apache.hadoop.hbase.util.Bytes;
-058import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-059import org.apache.htrace.Trace;
-060
-061/**
-062 * The context, and return value, for a 
single submit/submitAll call.
-063 * Note on how this class (one AP submit) 
works. Initially, all requests are split into groups
-064 * by server; request is sent to each 
server in parallel; the RPC calls are not async so a
-065 * thread per server is used. Every time 
some actions fail, regions/locations might have
-066 * changed, so we re-group them by server 
and region again and send these groups in parallel
-067 * too. The result, in case of retries, 
is a "tree" of threads, with parent exiting after
-068 * scheduling children. This is why lots 
of code doesn't require any synchronization.
-069 */
-070@InterfaceAudience.Private
-071class 
AsyncRequestFutureImplCResult implements AsyncRequestFuture {
-072
-073  private static final Log LOG = 
LogFactory.getLog(AsyncRequestFutureImpl.class);
-074
-075  private RetryingTimeTracker tracker;
-076
-077  /**
-078   * Runnable (that can be submitted to 
thread pool) that waits for when it's time
-079   * to issue replica calls, finds region 
replicas, groups the requests by replica and
-080   * issues the calls (on separate 
threads, via sendMultiAction).
-081   * This is done on a separate thread 
because we don't want to wait on user thread for
-082   * our asynchronous call, and usually 
we have to wait before making replica calls.
-083   */
-084  private final class 
ReplicaCallIssuingRunnable implements Runnable {
-085private final long startTime;
-086private final ListAction 
initialActions;
-087
-088public 
ReplicaCallIssuingRunnable(ListAction initialActions, long startTime) 
{
-089  this.initialActions = 
initialActions;
-090  this.startTime = startTime;
-091}
-092
-093@Override
-094public void run() {
-095  boolean done = false;
-096  if 
(asyncProcess.primaryCallTimeoutMicroseconds  0) {
-097try {
-098  done = waitUntilDone(startTime 
* 1000L + asyncProcess.primaryCallTimeoutMicroseconds);
-099} catch (InterruptedException ex) 
{
-100  LOG.error("Replica thread was 
interrupted - no replica calls: " + ex.getMessage());
-101  return;
-102}
-103  }
-104  if (done) return; // Done within 
primary timeout
-105  MapServerName, MultiAction 
actionsByServer = new HashMap();
-106  ListAction 
unknownLocActions = new ArrayList();
-107  if (replicaGetIndices == null) {
-108for (int i = 0; i  
results.length; ++i) {
-109  addReplicaActions(i, 
actionsByServer, unknownLocActions);
-110}
-111  } else {
-112for (int replicaGetIndice : 
replicaGetIndices) {
-113  
addReplicaActions(replicaGetIndice, actionsByServer, unknownLocActions);
-114}
-115  }
-116  if (!actionsByServer.isEmpty()) {
-117sendMultiAction(actionsByServer, 
1, null, unknownLocActions.isEmpty());
-118  }
-119  if (!unknownLocActions.isEmpty()) 
{
-120actionsByServer = new 
HashMap();
-121for (Action action : 
unknownLocActions) {
-122  addReplicaActionsAgain(action, 
actionsByServer);
-123}
-124// Some actions may have 
completely failed, they are handled inside addAgain.
-125if (!actionsByServer.isEmpty()) 
{
-126  
sendMultiAction(actionsByServer, 1, null, true);
-127}
-128  }
-129}
-130
-131/**
-132 * Add replica actions to action map 
by server.
-133 * @param index Index of the original 
action.
-134 * @param actionsByServer The map by 
server to add it to.
-135 */
-136   

[19/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
index 3fa16c1..d3a1e91 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html
@@ -56,1228 +56,1235 @@
 048import 
org.apache.hadoop.hbase.RetryImmediatelyException;
 049import 
org.apache.hadoop.hbase.ServerName;
 050import 
org.apache.hadoop.hbase.TableName;
-051import 
org.apache.yetus.audience.InterfaceAudience;
-052import 
org.apache.hadoop.hbase.client.backoff.ServerStatistics;
-053import 
org.apache.hadoop.hbase.client.coprocessor.Batch;
-054import 
org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil;
-055import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-056import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-057import 
org.apache.hadoop.hbase.util.Bytes;
-058import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-059import org.apache.htrace.Trace;
-060
-061/**
-062 * The context, and return value, for a 
single submit/submitAll call.
-063 * Note on how this class (one AP submit) 
works. Initially, all requests are split into groups
-064 * by server; request is sent to each 
server in parallel; the RPC calls are not async so a
-065 * thread per server is used. Every time 
some actions fail, regions/locations might have
-066 * changed, so we re-group them by server 
and region again and send these groups in parallel
-067 * too. The result, in case of retries, 
is a "tree" of threads, with parent exiting after
-068 * scheduling children. This is why lots 
of code doesn't require any synchronization.
-069 */
-070@InterfaceAudience.Private
-071class 
AsyncRequestFutureImplCResult implements AsyncRequestFuture {
-072
-073  private static final Log LOG = 
LogFactory.getLog(AsyncRequestFutureImpl.class);
-074
-075  private RetryingTimeTracker tracker;
-076
-077  /**
-078   * Runnable (that can be submitted to 
thread pool) that waits for when it's time
-079   * to issue replica calls, finds region 
replicas, groups the requests by replica and
-080   * issues the calls (on separate 
threads, via sendMultiAction).
-081   * This is done on a separate thread 
because we don't want to wait on user thread for
-082   * our asynchronous call, and usually 
we have to wait before making replica calls.
-083   */
-084  private final class 
ReplicaCallIssuingRunnable implements Runnable {
-085private final long startTime;
-086private final ListAction 
initialActions;
-087
-088public 
ReplicaCallIssuingRunnable(ListAction initialActions, long startTime) 
{
-089  this.initialActions = 
initialActions;
-090  this.startTime = startTime;
-091}
-092
-093@Override
-094public void run() {
-095  boolean done = false;
-096  if 
(asyncProcess.primaryCallTimeoutMicroseconds  0) {
-097try {
-098  done = waitUntilDone(startTime 
* 1000L + asyncProcess.primaryCallTimeoutMicroseconds);
-099} catch (InterruptedException ex) 
{
-100  LOG.error("Replica thread was 
interrupted - no replica calls: " + ex.getMessage());
-101  return;
-102}
-103  }
-104  if (done) return; // Done within 
primary timeout
-105  MapServerName, MultiAction 
actionsByServer = new HashMap();
-106  ListAction 
unknownLocActions = new ArrayList();
-107  if (replicaGetIndices == null) {
-108for (int i = 0; i  
results.length; ++i) {
-109  addReplicaActions(i, 
actionsByServer, unknownLocActions);
-110}
-111  } else {
-112for (int replicaGetIndice : 
replicaGetIndices) {
-113  
addReplicaActions(replicaGetIndice, actionsByServer, unknownLocActions);
-114}
-115  }
-116  if (!actionsByServer.isEmpty()) {
-117sendMultiAction(actionsByServer, 
1, null, unknownLocActions.isEmpty());
-118  }
-119  if (!unknownLocActions.isEmpty()) 
{
-120actionsByServer = new 
HashMap();
-121for (Action action : 
unknownLocActions) {
-122  addReplicaActionsAgain(action, 
actionsByServer);
-123}
-124// Some actions may have 
completely failed, they are handled inside addAgain.
-125if (!actionsByServer.isEmpty()) 
{
-126  
sendMultiAction(actionsByServer, 1, null, true);
-127}
-128  }
-129}
-130
-131/**
-132 * Add replica actions to action map 
by server.
-133 * @param index Index of the original 
action.
-134 * @param actionsByServer The map by 
server to add it to.
-135 */
-136private void addReplicaActions(int 
index, MapServerName, 

[03/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
index ffb4e9a..b7013ce 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
@@ -85,762 +85,758 @@
 077import 
org.apache.hadoop.security.authorize.ProxyUsers;
 078import 
org.apache.hadoop.security.token.SecretManager.InvalidToken;
 079import 
org.apache.hadoop.security.token.TokenIdentifier;
-080import org.apache.htrace.TraceInfo;
-081
-082/** Reads calls from a connection and 
queues them for handling. */
-083@edu.umd.cs.findbugs.annotations.SuppressWarnings(
-084value="VO_VOLATILE_INCREMENT",
-085justification="False positive 
according to http://sourceforge.net/p/findbugs/bugs/1032/;)
-086@InterfaceAudience.Private
-087abstract class ServerRpcConnection 
implements Closeable {
-088  /**  */
-089  protected final RpcServer rpcServer;
-090  // If the connection header has been 
read or not.
-091  protected boolean connectionHeaderRead 
= false;
-092
-093  protected CallCleanup callCleanup;
-094
-095  // Cache the remote host & port info so that even if the socket is
-096  // disconnected, we can say where it used to connect to.
-097  protected String hostAddress;
-098  protected int remotePort;
-099  protected InetAddress addr;
-100  protected ConnectionHeader 
connectionHeader;
-101
-102  /**
-103   * Codec the client asked use.
-104   */
-105  protected Codec codec;
-106  /**
-107   * Compression codec the client asked 
us use.
-108   */
-109  protected CompressionCodec 
compressionCodec;
-110  protected BlockingService service;
-111
-112  protected AuthMethod authMethod;
-113  protected boolean 
saslContextEstablished;
-114  protected boolean 
skipInitialSaslHandshake;
-115  private ByteBuffer unwrappedData;
-116  // When is this set? FindBugs wants to 
know! Says NP
-117  private ByteBuffer 
unwrappedDataLengthBuffer = ByteBuffer.allocate(4);
-118  protected boolean useSasl;
-119  protected HBaseSaslRpcServer 
saslServer;
-120  protected CryptoAES cryptoAES;
-121  protected boolean useWrap = false;
-122  protected boolean useCryptoAesWrap = 
false;
-123
-124  // was authentication allowed with a 
fallback to simple auth
-125  protected boolean 
authenticatedWithFallback;
-126
-127  protected boolean 
retryImmediatelySupported = false;
-128
-129  protected User user = null;
-130  protected UserGroupInformation ugi = 
null;
-131
-132  public ServerRpcConnection(RpcServer 
rpcServer) {
-133this.rpcServer = rpcServer;
-134this.callCleanup = null;
-135  }
-136
-137  @Override
-138  public String toString() {
-139return getHostAddress() + ":" + 
remotePort;
-140  }
-141
-142  public String getHostAddress() {
-143return hostAddress;
-144  }
-145
-146  public InetAddress getHostInetAddress() 
{
-147return addr;
-148  }
-149
-150  public int getRemotePort() {
-151return remotePort;
-152  }
-153
-154  public VersionInfo getVersionInfo() {
-155if 
(connectionHeader.hasVersionInfo()) {
-156  return 
connectionHeader.getVersionInfo();
-157}
-158return null;
-159  }
-160
-161  private String 
getFatalConnectionString(final int version, final byte authByte) {
-162return "serverVersion=" + 
RpcServer.CURRENT_VERSION +
-163", clientVersion=" + version + ", 
authMethod=" + authByte +
-164", authSupported=" + (authMethod != 
null) + " from " + toString();
-165  }
-166
-167  private UserGroupInformation 
getAuthorizedUgi(String authorizedId)
-168  throws IOException {
-169UserGroupInformation authorizedUgi;
-170if (authMethod == AuthMethod.DIGEST) 
{
-171  TokenIdentifier tokenId = 
HBaseSaslRpcServer.getIdentifier(authorizedId,
-172  
this.rpcServer.secretManager);
-173  authorizedUgi = 
tokenId.getUser();
-174  if (authorizedUgi == null) {
-175throw new 
AccessDeniedException(
-176"Can't retrieve username from 
tokenIdentifier.");
-177  }
-178  
authorizedUgi.addTokenIdentifier(tokenId);
-179} else {
-180  authorizedUgi = 
UserGroupInformation.createRemoteUser(authorizedId);
-181}
-182
authorizedUgi.setAuthenticationMethod(authMethod.authenticationMethod.getAuthMethod());
-183return authorizedUgi;
-184  }
-185
-186  /**
-187   * Set up cell block codecs
-188   * @throws FatalConnectionException
-189   */
-190  private void setupCellBlockCodecs(final 
ConnectionHeader header)
-191  throws FatalConnectionException {
-192// TODO: Plug in other supported 
decoders.
-193if (!header.hasCellBlockCodecClass()) 

[23/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index d776b93..bec1bbc 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -191,9 +191,9 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
+org.apache.hadoop.hbase.security.AuthMethod
 org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection
 org.apache.hadoop.hbase.security.SaslStatus
-org.apache.hadoop.hbase.security.AuthMethod
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
index 59ab0ce..cd08056 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
@@ -198,8 +198,8 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.thrift.MetricsThriftServerSourceFactoryImpl.FactoryStorage
 org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType
+org.apache.hadoop.hbase.thrift.MetricsThriftServerSourceFactoryImpl.FactoryStorage
 org.apache.hadoop.hbase.thrift.ThriftMetrics.ThriftServerType
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.html 
b/devapidocs/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.html
index d9096d9..4598905 100644
--- a/devapidocs/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.html
+++ b/devapidocs/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.html
@@ -100,7 +100,7 @@ var activeTableTab = "activeTableTab";
 http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
 
 
-org.apache.htrace.HTraceConfiguration
+org.apache.htrace.core.HTraceConfiguration
 
 
 org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration
@@ -116,7 +116,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Private
 public class HBaseHTraceConfiguration
-extends org.apache.htrace.HTraceConfiguration
+extends org.apache.htrace.core.HTraceConfiguration
 
 
 
@@ -149,10 +149,10 @@ extends org.apache.htrace.HTraceConfiguration
 
 
 
-
+
 
 
-Fields inherited from class org.apache.htrace.HTraceConfiguration
+Fields inherited from class org.apache.htrace.core.HTraceConfiguration
 EMPTY
 
 
@@ -206,11 +206,11 @@ extends org.apache.htrace.HTraceConfiguration
 
 
 
-
+
 
 
-Methods inherited from class org.apache.htrace.HTraceConfiguration
-fromMap, getInt
+Methods inherited from class org.apache.htrace.core.HTraceConfiguration
+fromKeyValuePairs, fromMap, getInt
 
 
 
@@ -248,7 +248,7 @@ extends org.apache.htrace.HTraceConfiguration
 
 
 KEY_PREFIX
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String KEY_PREFIX
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String KEY_PREFIX
 
 See Also:
 Constant
 Field Values
@@ -261,7 +261,7 @@ extends org.apache.htrace.HTraceConfiguration
 
 
 conf
-privateorg.apache.hadoop.conf.Configuration conf
+privateorg.apache.hadoop.conf.Configuration conf
 
 
 
@@ -278,7 +278,7 @@ extends org.apache.htrace.HTraceConfiguration
 
 
 HBaseHTraceConfiguration
-publicHBaseHTraceConfiguration(org.apache.hadoop.conf.Configurationconf)
+publicHBaseHTraceConfiguration(org.apache.hadoop.conf.Configurationconf)
 
 
 
@@ -295,7 +295,7 @@ extends 

[31/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index 72c20d4..a90b47d 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class HRegionServer
+public class HRegionServer
 extends HasThread
 implements RegionServerServices, LastSequenceId, 
ConfigurationObserver
 HRegionServer makes a set of HRegions available to clients. 
It checks in with
@@ -1497,7 +1497,7 @@ implements 
 
 INIT_PAUSE_TIME_MS
-private static final int INIT_PAUSE_TIME_MS
+private static final int INIT_PAUSE_TIME_MS
 
 See Also:
 Constant
 Field Values
@@ -1510,7 +1510,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -1519,7 +1519,7 @@ implements 
 
 TEST_SKIP_REPORTING_TRANSITION
-public static boolean TEST_SKIP_REPORTING_TRANSITION
+public static boolean TEST_SKIP_REPORTING_TRANSITION
 For testing only! Set to true to skip notifying region assignment to master.
 
 
@@ -1529,7 +1529,7 @@ implements 
 
 regionsInTransitionInRS
-protected final ConcurrentMap<byte[],Boolean> regionsInTransitionInRS
+protected final ConcurrentMap<byte[],Boolean> regionsInTransitionInRS
 
 
 
@@ -1538,7 +1538,7 @@ implements 
 
 cacheFlusher
-protected MemStoreFlusher cacheFlusher
+protected MemStoreFlusher cacheFlusher
 
 
 
@@ -1547,7 +1547,7 @@ implements 
 
 hMemManager
-protected HeapMemoryManager hMemManager
+protected HeapMemoryManager hMemManager
 
 
 
@@ -1556,7 +1556,7 @@ implements 
 
 initLatch
-protected CountDownLatch initLatch
+protected CountDownLatch initLatch
 
 
 
@@ -1565,7 +1565,7 @@ implements 
 
 clusterConnection
-protected ClusterConnection clusterConnection
+protected ClusterConnection clusterConnection
 Cluster connection to be shared by services.
  Initialized at server startup and closed when server shuts down.
  Clients must never close it explicitly.
@@ -1577,7 +1577,7 @@ implements 
 
 metaTableLocator
-protected MetaTableLocator metaTableLocator
+protected MetaTableLocator metaTableLocator
 
 
 
@@ -1586,7 +1586,7 @@ implements 
 
 tableDescriptors
-protected TableDescriptors tableDescriptors
+protected TableDescriptors tableDescriptors
 Go here to get table descriptors.
 
 
@@ -1596,7 +1596,7 @@ implements 
 
 replicationSourceHandler
-protected ReplicationSourceService replicationSourceHandler
+protected ReplicationSourceService replicationSourceHandler
 
 
 
@@ -1605,7 +1605,7 @@ implements 
 
 replicationSinkHandler
-protected ReplicationSinkService replicationSinkHandler
+protected ReplicationSinkService replicationSinkHandler
 
 
 
@@ -1614,7 +1614,7 @@ implements 
 
 compactSplitThread
-public CompactSplit compactSplitThread
+public CompactSplit compactSplitThread
 
 
 
@@ -1623,7 +1623,7 @@ implements 
 
 onlineRegions
-protected final Map<String,HRegion> onlineRegions
+protected final Map<String,HRegion> onlineRegions
 Map of regions currently being served by this region server. Key is the
 encoded region name. All access should be synchronized.
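As an aside on the contract quoted just above ("All access should be synchronized"), here is a small hypothetical holder showing the access pattern a plain Map keyed by encoded region name implies; the class and method names are illustrative only and do not appear in HRegionServer.

import java.util.HashMap;
import java.util.Map;

// Illustrative only: a plain Map guarded by explicit synchronization, in contrast to
// the ConcurrentMap used for regionsInTransitionInRS, which needs no external locking.
class OnlineRegionsHolder<R> {
  private final Map<String, R> onlineRegions = new HashMap<>();

  void add(String encodedRegionName, R region) {
    synchronized (onlineRegions) {
      onlineRegions.put(encodedRegionName, region);
    }
  }

  R get(String encodedRegionName) {
    synchronized (onlineRegions) {
      return onlineRegions.get(encodedRegionName);
    }
  }
}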
 
@@ -1634,7 +1634,7 @@ implements 
 
 regionFavoredNodesMap
-protected finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface 

[43/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index b173cff..3f3fcaa 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -273,12 +273,12 @@
 
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
-org.apache.hadoop.hbase.io.hfile.CacheConfig.ExternalBlockCaches
 org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType
-org.apache.hadoop.hbase.io.hfile.BlockType
-org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.State
 org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory
+org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.State
 org.apache.hadoop.hbase.io.hfile.BlockPriority
+org.apache.hadoop.hbase.io.hfile.BlockType
+org.apache.hadoop.hbase.io.hfile.CacheConfig.ExternalBlockCaches
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.CallSender.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.CallSender.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.CallSender.html
index 1485b05..623d635 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.CallSender.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.CallSender.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private class BlockingRpcConnection.CallSender
+private class BlockingRpcConnection.CallSender
 extends Thread
 If the client wants to interrupt its calls easily (i.e. 
call Thread#interrupt), it gets into a
  java issue: an interruption during a write closes the socket/channel. A way 
to avoid this is to
@@ -275,7 +275,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?
 
 
 callsToWrite
-private final Queue<Call> callsToWrite
+private final Queue<Call> callsToWrite
 
 
 
@@ -284,7 +284,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?
 
 
 maxQueueSize
-private final int maxQueueSize
+private final int maxQueueSize
 
 
 
@@ -301,7 +301,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?
 
 
 CallSender
-public CallSender(String name,
+public CallSender(String name,
                   org.apache.hadoop.conf.Configuration conf)
 
 
@@ -319,7 +319,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?
 
 
 sendCall
-public void sendCall(Call call)
+public void sendCall(Call call)
                throws IOException
 
 Throws:
@@ -333,7 +333,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?
 
 
 remove
-public void remove(Call call)
+public void remove(Call call)
 
 
 
@@ -342,7 +342,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?
 
 
 run
-public void run()
+public void run()
 Reads calls from the queue and writes them to the socket.
 
 Specified by:
@@ -358,7 +358,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?
 
 
 cleanup
-public void cleanup(IOException e)
+public void cleanup(IOException e)
 Cleans the call not yet sent when we finish.
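The CallSender javadoc above describes the motivation: interrupting a thread that is writing to a socket closes the channel, so writes are delegated to a dedicated sender thread fed through a queue. A rough, self-contained sketch of that pattern follows; the QueuedWriter class, its byte[]-based calls, and the OutputStream stand-in are assumptions for illustration, not the actual HBase implementation.

import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayDeque;
import java.util.Queue;

// Stand-in sketch of the queue-plus-dedicated-writer pattern: client threads enqueue
// calls and only this thread ever touches the socket, so interrupting a client never
// interrupts a write in progress (which would close the underlying channel).
class QueuedWriter extends Thread {
  private final Queue<byte[]> callsToWrite = new ArrayDeque<>();
  private final int maxQueueSize;
  private final OutputStream socketOut;  // stand-in for the real connection

  QueuedWriter(String name, OutputStream socketOut, int maxQueueSize) {
    super(name);
    this.socketOut = socketOut;
    this.maxQueueSize = maxQueueSize;
  }

  /** Called by client threads; they queue the call instead of writing it themselves. */
  synchronized void sendCall(byte[] call) throws IOException {
    if (callsToWrite.size() >= maxQueueSize) {
      throw new IOException("Call queue full");
    }
    callsToWrite.add(call);
    notifyAll();
  }

  @Override
  public void run() {
    try {
      while (!isInterrupted()) {
        byte[] call;
        synchronized (this) {
          while (callsToWrite.isEmpty()) {
            wait();
          }
          call = callsToWrite.poll();
        }
        socketOut.write(call);  // only this thread performs socket writes
      }
    } catch (InterruptedException | IOException e) {
      // Rough equivalent of cleanup(): drop whatever is still queued and exit.
      synchronized (this) {
        callsToWrite.clear();
      }
    }
  }
}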
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.html

[41/51] [partial] hbase-site git commit: Published site at .

2017-11-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
 
b/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
index 7f0d42c..cf17eea 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class ServerRpcConnection.ByteBuffByteInput
+private static class ServerRpcConnection.ByteBuffByteInput
 extends org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput
 
 
@@ -243,7 +243,7 @@ extends 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput
 
 
 buf
-private ByteBuff buf
+private ByteBuff buf
 
 
 
@@ -252,7 +252,7 @@ extends 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput
 
 
 offset
-private int offset
+private int offset
 
 
 
@@ -261,7 +261,7 @@ extends 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput
 
 
 length
-private int length
+private int length
 
 
 
@@ -278,7 +278,7 @@ extends 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput
 
 
 ByteBuffByteInput
-ByteBuffByteInput(ByteBuff buf,
+ByteBuffByteInput(ByteBuff buf,
                   int offset,
                   int length)
 
@@ -297,7 +297,7 @@ extends 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput
 
 
 read
-public byte read(int offset)
+public byte read(int offset)
 
 Specified by:
 read in class org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput
@@ -310,7 +310,7 @@ extends 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput
 
 
 getAbsoluteOffset
-private int getAbsoluteOffset(int offset)
+private int getAbsoluteOffset(int offset)
 
 
 
@@ -319,7 +319,7 @@ extends 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput
 
 
 read
-public int read(int offset,
+public int read(int offset,
                 byte[] out,
                 int outOffset,
                 int len)
@@ -335,7 +335,7 @@ extends 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput
 
 
 read
-public int read(int offset,
+public int read(int offset,
                 ByteBuffer out)
 
 Specified by:
@@ -349,7 +349,7 @@ extends 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput
 
 
 size
-public int size()
+public int size()
 
 Specified by:
 size in class org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput
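ByteBuffByteInput, as listed above, presents an (offset, length) window of a ByteBuff to the shaded protobuf ByteInput API. The following self-contained sketch shows the same windowing idea over a plain java.nio.ByteBuffer; it deliberately does not extend the shaded ByteInput class, and the class name is invented for illustration.

import java.nio.ByteBuffer;

// Illustration only: expose an (offset, length) slice of a backing buffer through
// absolute reads, translating caller offsets with a getAbsoluteOffset-style helper.
final class WindowedByteInput {
  private final ByteBuffer buf;  // stand-in for the real ByteBuff
  private final int offset;
  private final int length;

  WindowedByteInput(ByteBuffer buf, int offset, int length) {
    this.buf = buf;
    this.offset = offset;
    this.length = length;
  }

  private int getAbsoluteOffset(int relOffset) {
    return offset + relOffset;
  }

  byte read(int relOffset) {
    return buf.get(getAbsoluteOffset(relOffset));
  }

  int read(int relOffset, byte[] out, int outOffset, int len) {
    // Use a duplicate so the shared buffer's position and limit stay untouched.
    ByteBuffer dup = buf.duplicate();
    dup.position(getAbsoluteOffset(relOffset));
    dup.get(out, outOffset, len);
    return len;
  }

  int size() {
    return length;
  }
}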

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/abb69192/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html
index eb99f57..e1d04fd 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-abstract class ServerRpcConnection
+abstract class ServerRpcConnection
 extends Object
 implements Closeable
 Reads calls from a connection and queues them for handling.
@@ -297,14 +297,13 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 
 
 abstract ServerCall<?>
-createCall(int id,
+createCall(int id,
            org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService service,
            org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor md,
            org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader header,
            org.apache.hadoop.hbase.shaded.com.google.protobuf.Message param,
            CellScanner cellScanner,
            long size,
-           org.apache.htrace.TraceInfo tinfo,
            InetAddress remoteAddress,
            int timeout,
            RpcServer.CallCleanup reqCleanup)
@@ -459,7 +458,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 
 
 rpcServer
-protected final RpcServer rpcServer
+protected final RpcServer rpcServer
 
 
 
@@ -468,7 +467,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 
 
 connectionHeaderRead
-protected boolean connectionHeaderRead
+protected boolean connectionHeaderRead