[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-08-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/346adc37/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
index 291256a..752a26f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
@@ -30,151 +30,151 @@
 022import java.io.IOException;
 023import java.io.InputStream;
 024import java.io.OutputStream;
-025import 
java.util.concurrent.atomic.AtomicBoolean;
-026
-027import org.apache.commons.logging.Log;
-028import 
org.apache.commons.logging.LogFactory;
-029import 
org.apache.hadoop.hbase.HConstants;
-030import 
org.apache.hadoop.hbase.HRegionInfo;
-031import 
org.apache.hadoop.hbase.NotServingRegionException;
-032import 
org.apache.hadoop.hbase.ServerName;
-033import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-034import 
org.apache.hadoop.hbase.exceptions.UnexpectedStateException;
-035import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-036import 
org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
-037import 
org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-038import 
org.apache.hadoop.hbase.master.procedure.ServerCrashException;
-039import 
org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher.RegionCloseOperation;
-040import 
org.apache.hadoop.hbase.master.RegionState.State;
-041import 
org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
-042import 
org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperation;
-043import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-044import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState;
-045import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.UnassignRegionStateData;
-046import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
-047import 
org.apache.hadoop.hbase.regionserver.RegionServerAbortedException;
-048import 
org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
-049
+025import java.net.ConnectException;
+026import 
java.util.concurrent.atomic.AtomicBoolean;
+027
+028import org.apache.commons.logging.Log;
+029import 
org.apache.commons.logging.LogFactory;
+030import 
org.apache.hadoop.hbase.HConstants;
+031import 
org.apache.hadoop.hbase.HRegionInfo;
+032import 
org.apache.hadoop.hbase.NotServingRegionException;
+033import 
org.apache.hadoop.hbase.ServerName;
+034import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+035import 
org.apache.hadoop.hbase.exceptions.UnexpectedStateException;
+036import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
+037import 
org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
+038import 
org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+039import 
org.apache.hadoop.hbase.master.procedure.ServerCrashException;
+040import 
org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher.RegionCloseOperation;
+041import 
org.apache.hadoop.hbase.master.RegionState.State;
+042import 
org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
+043import 
org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperation;
+044import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+045import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState;
+046import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.UnassignRegionStateData;
+047import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
+048import 
org.apache.hadoop.hbase.regionserver.RegionServerAbortedException;
+049import 
org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
 050
-051/**
-052 * Procedure that describe the 
unassignment of a single region.
-053 * There can only be one 
RegionTransitionProcedure per region running at the time,
-054 * since each procedure takes a lock on 
the region.
-055 *
 -056 *

                              The Unassign starts by placing a "close region" request in the Remote Dispatcher -057 * queue, and the procedure will then go into a "waiting state". -058 * The Remote Dispatcher will batch the various requests for that server and -059 * they will be sent to the RS for execution. -060 * The RS will complete the open operation by calling master.reportRegionStateTransition(). -061 * The AM will intercept the transition report, and notify the procedure. -062 * The procedure will finish the unassign by publishing its new state on meta -063 * or it will retry the unassign. -064 */ -065@Inte 
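 As a reading aid for the javadoc above, here is a self-contained Java sketch of the dispatch-and-wait pattern it describes. This is illustrative only, not HBase source: the queue and method names are invented stand-ins for the Remote Dispatcher and AssignmentManager internals. import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; public class UnassignFlowSketch { // Hypothetical stand-in for the Remote Dispatcher's per-server batching. static final Map> closeQueue = new HashMap<>(); static void submitClose(String server, String region) { // The procedure enqueues a "close region" request for the server's // batch and then parks in a "waiting state". closeQueue.computeIfAbsent(server, s -> new ArrayList<>()).add(region); } static void reportRegionStateTransition(String server, String region) { // The AM intercepts the RS's transition report and wakes the waiting // procedure, which then publishes the new state to meta (or retries). closeQueue.getOrDefault(server, new ArrayList<>()).remove(region); System.out.println("closed " + region + " on " + server); } public static void main(String[] args) { submitClose("rs1.example.com,16020,1502409600000", "demoRegion"); reportRegionStateTransition("rs1.example.com,16020,1502409600000", "demoRegion"); } } 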


[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-08-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2aec596e/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.html
index a902aae..88b3ae1 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.html
@@ -250,14 +250,14 @@
 
 
 AsyncTableBuilder
-AsyncConnectionImpl.getRawTableBuilder(TableName tableName) 
-
-
-AsyncTableBuilder
 AsyncConnection.getRawTableBuilder(TableName tableName)
 Returns an AsyncTableBuilder for creating 
RawAsyncTable.
 
 
+
+AsyncTableBuilder
+AsyncConnectionImpl.getRawTableBuilder(TableName tableName) 
+
 
 
 
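 A minimal usage sketch for the getRawTableBuilder rows above (hedged: the method names follow this class-use page, but the builder option shown, setReadRpcTimeout, is an assumption that may differ across 2.0/3.0 snapshots): import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.AsyncConnection; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.RawAsyncTable; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; class RawTableBuilderSketch { // Assumes an AsyncConnection obtained elsewhere; the builder lets callers // tune per-table settings before materializing the RawAsyncTable. static CompletableFuture readOneRow(AsyncConnection conn) { RawAsyncTable table = conn.getRawTableBuilder(TableName.valueOf("demoTable")) .setReadRpcTimeout(2, TimeUnit.SECONDS) .build(); return table.get(new Get(Bytes.toBytes("row1"))); } } 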

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2aec596e/devapidocs/org/apache/hadoop/hbase/client/class-use/RawScanResultConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RawScanResultConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RawScanResultConsumer.html
index 86f4e21..3d57124 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RawScanResultConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RawScanResultConsumer.html
@@ -143,15 +143,15 @@
 
 
 private RawScanResultConsumer
-AsyncScanSingleRegionRpcRetryingCaller.consumer 
+AsyncClientScanner.consumer 
 
 
 private RawScanResultConsumer
-AsyncClientScanner.consumer 
+AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.consumer 
 
 
 private RawScanResultConsumer
-AsyncRpcRetryingCallerFactory.ScanSingleRegionCallerBuilder.consumer 
+AsyncScanSingleRegionRpcRetryingCaller.consumer 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2aec596e/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html
index 35803b1..0c5df6f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocateType.html
@@ -106,7 +106,7 @@
 
 
 private RegionLocateType
-AsyncSingleRequestRpcRetryingCaller.locateType 
+AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.locateType 
 
 
 RegionLocateType
@@ -114,7 +114,7 @@
 
 
 private RegionLocateType
-AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.locateType 
+AsyncSingleRequestRpcRetryingCaller.locateType 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2aec596e/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
index 404c773..12654b6 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
@@ -204,14 +204,14 @@ service.
 
 
 private RegionLocator
-HFileOutputFormat2.TableInfo.regionLocator 
-
-
-private RegionLocator
 TableInputFormatBase.regionLocator
 The RegionLocator of the 
table.
 
 
+
+private RegionLocator
+HFileOutputFormat2.TableInfo.regionLocator 
+
 
 
 
@@ -222,15 +222,15 @@ service.
 
 
 
-RegionLocator
-HFileOutputFormat2.TableInfo.getRegionLocator() 
-
-
 protected RegionLocator
 TableInputFormatBase.getRegionLocator()
 Allows subclasses to get the RegionLocator.
 
 
+
+RegionLocator
+HFileOutputFormat2.TableInfo.getRegionLocator() 
+
 
 
 



[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1837997e/hbase-archetypes/hbase-client-project/source-repository.html
--
diff --git a/hbase-archetypes/hbase-client-project/source-repository.html 
b/hbase-archetypes/hbase-client-project/source-repository.html
index cba61ff..cafdf5f 100644
--- a/hbase-archetypes/hbase-client-project/source-repository.html
+++ b/hbase-archetypes/hbase-client-project/source-repository.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-30
+Last Published: 2017-07-31
    | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1837997e/hbase-archetypes/hbase-client-project/team-list.html
--
diff --git a/hbase-archetypes/hbase-client-project/team-list.html 
b/hbase-archetypes/hbase-client-project/team-list.html
index 28c86a8..a95265c 100644
--- a/hbase-archetypes/hbase-client-project/team-list.html
+++ b/hbase-archetypes/hbase-client-project/team-list.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-30
+Last Published: 2017-07-31
    | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1837997e/hbase-archetypes/hbase-shaded-client-project/checkstyle.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/checkstyle.html 
b/hbase-archetypes/hbase-shaded-client-project/checkstyle.html
index 0c9a254..5b42e3f 100644
--- a/hbase-archetypes/hbase-shaded-client-project/checkstyle.html
+++ b/hbase-archetypes/hbase-shaded-client-project/checkstyle.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-30
+Last Published: 2017-07-31
    | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1837997e/hbase-archetypes/hbase-shaded-client-project/dependencies.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/dependencies.html 
b/hbase-archetypes/hbase-shaded-client-project/dependencies.html
index ef6ff83..70f75a8 100644
--- a/hbase-archetypes/hbase-shaded-client-project/dependencies.html
+++ b/hbase-archetypes/hbase-shaded-client-project/dependencies.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-30
+Last Published: 2017-07-31
    | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1837997e/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
--
diff --git 
a/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html 
b/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
index b1b8d29..40a76d5 100644
--- a/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
+++ b/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a2b2dd19/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
index c9a18a3..c80f6d8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
@@ -2492,2617 +2492,2627 @@
 2484  return;
 2485}
 2486  }
-2487  
errors.reportError(ERROR_CODE.LINGERING_SPLIT_PARENT, "Region "
-2488  + descriptiveName + " is a 
split parent in META, in HDFS, "
-2489  + "and not deployed on any 
region server. This could be transient, "
-2490  + "consider to run the catalog 
janitor first!");
-2491  if (shouldFixSplitParents()) {
-2492setShouldRerun();
-2493resetSplitParent(hbi);
-2494  }
-2495} else if (inMeta && !inHdfs 
&& !isDeployed) {
-2496  
errors.reportError(ERROR_CODE.NOT_IN_HDFS_OR_DEPLOYED, "Region "
-2497  + descriptiveName + " found in 
META, but not in HDFS "
-2498  + "or deployed on any region 
server.");
-2499  if (shouldFixMeta()) {
-2500deleteMetaRegion(hbi);
-2501  }
-2502} else if (inMeta && !inHdfs 
&& isDeployed) {
-2503  
errors.reportError(ERROR_CODE.NOT_IN_HDFS, "Region " + descriptiveName
-2504  + " found in META, but not in 
HDFS, " +
-2505  "and deployed on " + 
Joiner.on(", ").join(hbi.deployedOn));
-2506  // We treat HDFS as ground truth.  
Any information in meta is transient
-2507  // and equivalent data can be 
regenerated.  So, lets unassign and remove
-2508  // these problems from META.
-2509  if (shouldFixAssignments()) {
-2510errors.print("Trying to fix 
unassigned region...");
-2511undeployRegions(hbi);
-2512  }
-2513  if (shouldFixMeta()) {
-2514// wait for it to complete
-2515deleteMetaRegion(hbi);
-2516  }
-2517} else if (inMeta && inHdfs 
&& !isDeployed && shouldBeDeployed) {
-2518  
errors.reportError(ERROR_CODE.NOT_DEPLOYED, "Region " + descriptiveName
-2519  + " not deployed on any region 
server.");
-2520  tryAssignmentRepair(hbi, "Trying 
to fix unassigned region...");
-2521} else if (inMeta && inHdfs 
&& isDeployed && !shouldBeDeployed) {
-2522  
errors.reportError(ERROR_CODE.SHOULD_NOT_BE_DEPLOYED,
-2523  "Region " + descriptiveName + 
" should not be deployed according " +
-2524  "to META, but is deployed on " 
+ Joiner.on(", ").join(hbi.deployedOn));
-2525  if (shouldFixAssignments()) {
-2526errors.print("Trying to close 
the region " + descriptiveName);
-2527setShouldRerun();
-2528
HBaseFsckRepair.fixMultiAssignment(connection, hbi.metaEntry, 
hbi.deployedOn);
-2529  }
-2530} else if (inMeta && inHdfs 
&& isMultiplyDeployed) {
-2531  
errors.reportError(ERROR_CODE.MULTI_DEPLOYED, "Region " + descriptiveName
-2532  + " is listed in hbase:meta on 
region server " + hbi.metaEntry.regionServer
-2533  + " but is multiply assigned 
to region servers " +
-2534  Joiner.on(", 
").join(hbi.deployedOn));
-2535  // If we are trying to fix the 
errors
-2536  if (shouldFixAssignments()) {
-2537errors.print("Trying to fix 
assignment error...");
-2538setShouldRerun();
-2539
HBaseFsckRepair.fixMultiAssignment(connection, hbi.metaEntry, 
hbi.deployedOn);
-2540  }
-2541} else if (inMeta && inHdfs 
&& isDeployed && !deploymentMatchesMeta) {
-2542  
errors.reportError(ERROR_CODE.SERVER_DOES_NOT_MATCH_META, "Region "
-2543  + descriptiveName + " listed 
in hbase:meta on region server " +
-2544  hbi.metaEntry.regionServer + " 
but found on region server " +
-2545  hbi.deployedOn.get(0));
-2546  // If we are trying to fix the 
errors
-2547  if (shouldFixAssignments()) {
-2548errors.print("Trying to fix 
assignment error...");
-2549setShouldRerun();
-2550
HBaseFsckRepair.fixMultiAssignment(connection, hbi.metaEntry, 
hbi.deployedOn);
-2551
HBaseFsckRepair.waitUntilAssigned(admin, hbi.getHdfsHRI());
-2552  }
-2553} else {
-2554  
errors.reportError(ERROR_CODE.UNKNOWN, "Region " + descriptiveName +
-2555  " is in an unforeseen state:" 
+
-2556  " inMeta=" + inMeta +
-2557  " inHdfs=" + inHdfs +
-2558  " isDeployed=" + isDeployed 
+
-2559  " isMultiplyDeployed=" + 
isMultiplyDeployed +
-2560  " deploymentMatchesMeta=" + 
deploymentMatchesMeta +
-2561  " shouldBeDeployed=" + 
shouldBeDeployed);
-2562}
-2563  }
-2564
-2565  /**
-2566   * Checks tables int
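 The branch ladder above is easier to audit as a single decision table mapping the region's observed flags to a repair action. An illustrative Java restatement (not HBase source; the strings paraphrase the ERROR_CODE actions shown above): static String classify(boolean inMeta, boolean inHdfs, boolean isDeployed, boolean shouldBeDeployed, boolean isMultiplyDeployed, boolean deploymentMatchesMeta) { if (inMeta && !inHdfs && !isDeployed) { return "NOT_IN_HDFS_OR_DEPLOYED: delete the meta row"; } else if (inMeta && !inHdfs && isDeployed) { // HDFS is treated as ground truth, so unassign and clean meta. return "NOT_IN_HDFS: undeploy regions, then delete the meta row"; } else if (inMeta && inHdfs && !isDeployed && shouldBeDeployed) { return "NOT_DEPLOYED: try assignment repair"; } else if (inMeta && inHdfs && isDeployed && !shouldBeDeployed) { return "SHOULD_NOT_BE_DEPLOYED: close the region, rerun"; } else if (inMeta && inHdfs && isMultiplyDeployed) { return "MULTI_DEPLOYED: fix multi-assignment, rerun"; } else if (inMeta && inHdfs && isDeployed && !deploymentMatchesMeta) { return "SERVER_DOES_NOT_MATCH_META: reassign and wait until assigned"; } else { return "UNKNOWN: unforeseen state, report all flags"; } } 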

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/21766f4a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/querymatcher/NormalUserScanQueryMatcher.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/querymatcher/NormalUserScanQueryMatcher.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/querymatcher/NormalUserScanQueryMatcher.html
index 1d02fea..8032a10 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/querymatcher/NormalUserScanQueryMatcher.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/querymatcher/NormalUserScanQueryMatcher.html
@@ -32,121 +32,119 @@
 024import 
org.apache.hadoop.hbase.KeepDeletedCells;
 025import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 026import 
org.apache.hadoop.hbase.client.Scan;
-027import 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
-028import 
org.apache.hadoop.hbase.regionserver.ScanInfo;
-029
-030/**
-031 * Query matcher for normal user scan.
-032 */
-033@InterfaceAudience.Private
-034public abstract class 
NormalUserScanQueryMatcher extends UserScanQueryMatcher {
-035
-036  /** Keeps track of deletes */
-037  private final DeleteTracker deletes;
-038
-039  /** True if we are doing a 'Get' Scan. 
Every Get is actually a one-row Scan. */
-040  private final boolean get;
-041
-042  /** whether time range queries can see 
rows "behind" a delete */
-043  private final boolean 
seePastDeleteMarkers;
-044
-045  protected 
NormalUserScanQueryMatcher(Scan scan, ScanInfo scanInfo, ColumnTracker 
columns,
-046  boolean hasNullColumn, 
DeleteTracker deletes, long oldestUnexpiredTS, long now) {
-047super(scan, scanInfo, columns, 
hasNullColumn, oldestUnexpiredTS, now);
-048this.deletes = deletes;
-049this.get = scan.isGetScan();
-050this.seePastDeleteMarkers = 
scanInfo.getKeepDeletedCells() != KeepDeletedCells.FALSE;
-051  }
-052
-053  @Override
-054  public void beforeShipped() throws 
IOException {
-055super.beforeShipped();
-056deletes.beforeShipped();
-057  }
-058
-059  @Override
-060  public MatchCode match(Cell cell) 
throws IOException {
-061if (filter != null && 
filter.filterAllRemaining()) {
-062  return MatchCode.DONE_SCAN;
-063}
-064MatchCode returnCode = 
preCheck(cell);
-065if (returnCode != null) {
-066  return returnCode;
-067}
-068long timestamp = 
cell.getTimestamp();
-069byte typeByte = cell.getTypeByte();
-070if (CellUtil.isDelete(typeByte)) {
-071  boolean includeDeleteMarker = 
seePastDeleteMarkers ? tr.withinTimeRange(timestamp)
-072  : 
tr.withinOrAfterTimeRange(timestamp);
-073  if (includeDeleteMarker) {
-074this.deletes.add(cell);
-075  }
-076  return MatchCode.SKIP;
-077}
-078returnCode = checkDeleted(deletes, 
cell);
-079if (returnCode != null) {
-080  return returnCode;
-081}
-082return matchColumn(cell, timestamp, 
typeByte);
-083  }
-084
-085  @Override
-086  protected void reset() {
-087deletes.reset();
-088  }
-089
-090  @Override
-091  protected boolean isGet() {
-092return get;
-093  }
-094
-095  public static 
NormalUserScanQueryMatcher create(Scan scan, ScanInfo scanInfo,
-096  ColumnTracker columns, boolean 
hasNullColumn, long oldestUnexpiredTS, long now,
-097  RegionCoprocessorHost 
regionCoprocessorHost) throws IOException {
-098DeleteTracker deletes = 
instantiateDeleteTracker(regionCoprocessorHost);
-099if (scan.isReversed()) {
-100  if (scan.includeStopRow()) {
-101return new 
NormalUserScanQueryMatcher(scan, scanInfo, columns, hasNullColumn, deletes,
-102oldestUnexpiredTS, now) {
-103
-104  @Override
-105  protected boolean 
moreRowsMayExistsAfter(int cmpToStopRow) {
-106return cmpToStopRow >= 
0;
-107  }
-108};
-109  } else {
-110return new 
NormalUserScanQueryMatcher(scan, scanInfo, columns, hasNullColumn, deletes,
-111oldestUnexpiredTS, now) {
-112
-113  @Override
-114  protected boolean 
moreRowsMayExistsAfter(int cmpToStopRow) {
-115return cmpToStopRow > 0;
-116  }
-117};
-118  }
-119} else {
-120  if (scan.includeStopRow()) {
-121return new 
NormalUserScanQueryMatcher(scan, scanInfo, columns, hasNullColumn, deletes,
-122oldestUnexpiredTS, now) {
-123
-124  @Override
-125  protected boolean 
moreRowsMayExistsAfter(int cmpToStopRow) {
-126return cmpToStopRow <= 
0;
-127  }
-128};
-129  } else {
-130return new 
NormalUserScanQueryMatcher(scan, scanInfo, columns, hasNullColumn, deletes,
-131oldestUnexpiredTS, now) {
-132
-133  @Override
-134  protected boolean 
moreRowsMayExistsAfter(int cmpToStopRow) {
-135return cmpToStopRow < 0;
-136
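 The four anonymous subclasses created by the factory above differ only in how they compare cmpToStopRow; collapsed into one predicate, the logic reads as follows (sketch, not HBase source): // reversed scans stop relative to a smaller stop row; includeStopRow // decides whether equality still admits more rows. static boolean moreRowsMayExistAfter(boolean reversed, boolean includeStopRow, int cmpToStopRow) { if (reversed) { return includeStopRow ? cmpToStopRow >= 0 : cmpToStopRow > 0; } return includeStopRow ? cmpToStopRow <= 0 : cmpToStopRow < 0; } 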

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2d5075d7/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.ProcedureIterator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.ProcedureIterator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.ProcedureIterator.html
index f16084a..7621348 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.ProcedureIterator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/procedure2/store/ProcedureStore.ProcedureIterator.html
@@ -28,209 +28,203 @@
 020
 021import java.io.IOException;
 022
-023import 
org.apache.hadoop.hbase.ProcedureInfo;
-024import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-025import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-026import 
org.apache.hadoop.hbase.procedure2.Procedure;
-027
-028/**
-029 * The ProcedureStore is used by the 
executor to persist the state of each procedure execution.
-030 * This allows to resume the execution of 
pending/in-progress procedures in case
-031 * of machine failure or service 
shutdown.
-032 */
-033@InterfaceAudience.Private
-034@InterfaceStability.Evolving
-035public interface ProcedureStore {
-036  /**
-037   * Store listener interface.
-038   * The main process should register a 
listener and respond to the store events.
-039   */
-040  public interface ProcedureStoreListener 
{
-041/**
-042 * triggered when the store sync is 
completed.
-043 */
-044void postSync();
-045
-046/**
-047 * triggered when the store is not 
able to write out data.
-048 * the main process should abort.
-049 */
-050void abortProcess();
-051  }
-052
-053  /**
-054   * An Iterator over a collection of 
Procedure
-055   */
-056  public interface ProcedureIterator {
-057/**
-058 * Reset the Iterator by seeking to 
the beginning of the list.
-059 */
-060void reset();
-061
-062/**
-063 * Returns true if the iterator has 
more elements.
-064 * (In other words, returns true if 
next() would return a Procedure
-065 * rather than throwing an 
exception.)
-066 * @return true if the iterator has 
more procedures
-067 */
-068boolean hasNext();
-069
-070/**
-071 * @return true if the iterator next 
element is a completed procedure.
-072 */
-073boolean isNextFinished();
-074
-075/**
-076 * Skip the next procedure
-077 */
-078void skipNext();
-079
-080/**
-081 * Returns the next procedure in the 
iteration.
-082 * @throws IOException if there was 
an error fetching/deserializing the procedure
-083 * @return the next procedure in the 
iteration.
-084 */
-085Procedure nextAsProcedure() throws 
IOException;
+023import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+024import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+025import 
org.apache.hadoop.hbase.procedure2.Procedure;
+026
+027/**
+028 * The ProcedureStore is used by the 
executor to persist the state of each procedure execution.
+029 * This allows to resume the execution of 
pending/in-progress procedures in case
+030 * of machine failure or service 
shutdown.
+031 */
+032@InterfaceAudience.Private
+033@InterfaceStability.Evolving
+034public interface ProcedureStore {
+035  /**
+036   * Store listener interface.
+037   * The main process should register a 
listener and respond to the store events.
+038   */
+039  public interface ProcedureStoreListener 
{
+040/**
+041 * triggered when the store sync is 
completed.
+042 */
+043void postSync();
+044
+045/**
+046 * triggered when the store is not 
able to write out data.
+047 * the main process should abort.
+048 */
+049void abortProcess();
+050  }
+051
+052  /**
+053   * An Iterator over a collection of 
Procedure
+054   */
+055  public interface ProcedureIterator {
+056/**
+057 * Reset the Iterator by seeking to 
the beginning of the list.
+058 */
+059void reset();
+060
+061/**
+062 * Returns true if the iterator has 
more elements.
+063 * (In other words, returns true if 
next() would return a Procedure
+064 * rather than throwing an 
exception.)
+065 * @return true if the iterator has 
more procedures
+066 */
+067boolean hasNext();
+068
+069/**
+070 * @return true if the iterator next 
element is a completed procedure.
+071 */
+072boolean isNextFinished();
+073
+074/**
+075 * Skip the next procedure
+076 */
+077void skipNext();
+078
+079/**
+080 * Returns the next procedure in the 
iteration.
+081 * @throws IOException if there was 
an error fetching/deserializing the procedure
+082 * @return the next procedure in the 
iteration.
+083 */
+084Procedure next() throws 
IOException;
+085  }
 086
-087
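 A typical consumer of the ProcedureIterator contract above looks like the following hedged sketch (the iterator itself would come from the store's load path, which this excerpt does not show): import java.io.IOException; // Drives the interface exactly as documented above: reset to the start, // skip completed procedures, and resume the rest. static void resumeAll(ProcedureStore.ProcedureIterator iter) throws IOException { iter.reset(); // seek back to the beginning of the list while (iter.hasNext()) { if (iter.isNextFinished()) { iter.skipNext(); // completed procedure: nothing to resume continue; } Procedure proc = iter.next(); // may throw on a deserialization error System.out.println("resuming " + proc); } } 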

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0383a9c2/export_control.html
--
diff --git a/export_control.html b/export_control.html
index 6fe3b1e..4f3356c 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Export Control
@@ -336,7 +336,7 @@ for more details.
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-07-22
+  Last Published: 
2017-07-23
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0383a9c2/hbase-annotations/checkstyle.html
--
diff --git a/hbase-annotations/checkstyle.html 
b/hbase-annotations/checkstyle.html
index 920..5a26d8e 100644
--- a/hbase-annotations/checkstyle.html
+++ b/hbase-annotations/checkstyle.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-22
+Last Published: 2017-07-23
    | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0383a9c2/hbase-annotations/dependencies.html
--
diff --git a/hbase-annotations/dependencies.html 
b/hbase-annotations/dependencies.html
index 5518f91..cd30803 100644
--- a/hbase-annotations/dependencies.html
+++ b/hbase-annotations/dependencies.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-22
+Last Published: 2017-07-23
    | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0383a9c2/hbase-annotations/dependency-convergence.html
--
diff --git a/hbase-annotations/dependency-convergence.html 
b/hbase-annotations/dependency-convergence.html
index 8ccf878..6fb3be5 100644
--- a/hbase-annotations/dependency-convergence.html
+++ b/hbase-annotations/dependency-convergence.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-22
+Last Published: 2017-07-23
    | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0383a9c2/hbase-annotations/dependency-info.html
--
diff --git a/hbase-annotations/dependency-info.html 
b/hbase-annotations/dependency-info.html
index 1575b93..1810629 100644
--- a/hbase-annotations/dependency-info.html
+++ b/hbase-annotations/dependency-info.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-22
+Last Published: 2017-07-23
    | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0383a9c2/hbase-annotations/dependency-management.html
--
diff --git a/hbase-annotations/dependency-management.html 
b/hbase-annotations/dependency-management.html
index 9f0bb09..8386a04 100644
--- a/hbase-annotations/dependency-management.html
+++ b/hbase-annotations/dependency-management.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f391bcef/devapidocs/src-html/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.html
index 41de383..e1f7fd7 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.html
@@ -384,976 +384,984 @@
 376return new 
ModifyableColumnFamilyDescriptor(desc);
 377  }
 378
-379  private 
ColumnFamilyDescriptorBuilder(final byte[] name) {
-380this.desc = new 
ModifyableColumnFamilyDescriptor(name);
+379  public static ColumnFamilyDescriptor 
of(String name) {
+380return of(Bytes.toBytes(name));
 381  }
 382
-383  private 
ColumnFamilyDescriptorBuilder(final ColumnFamilyDescriptor desc) {
-384this.desc = new 
ModifyableColumnFamilyDescriptor(desc);
+383  public static ColumnFamilyDescriptor 
of(byte[] name) {
+384return newBuilder(name).build();
 385  }
 386
-387  /**
-388   * @param desc The table descriptor to 
serialize
-389   * @return This instance serialized 
with pb with pb magic prefix
-390   */
-391  public static byte[] 
toByteArray(ColumnFamilyDescriptor desc) {
-392if (desc instanceof 
ModifyableColumnFamilyDescriptor) {
-393  return 
((ModifyableColumnFamilyDescriptor) desc).toByteArray();
-394}
-395return new 
ModifyableColumnFamilyDescriptor(desc).toByteArray();
-396  }
-397
-398  public ColumnFamilyDescriptor build() 
{
-399return new 
ModifyableColumnFamilyDescriptor(desc);
-400  }
-401
-402  public ColumnFamilyDescriptorBuilder 
removeConfiguration(String key) {
-403desc.removeConfiguration(key);
-404return this;
-405  }
-406
-407  public ColumnFamilyDescriptorBuilder 
setBlockCacheEnabled(boolean value) {
-408desc.setBlockCacheEnabled(value);
-409return this;
-410  }
-411
-412  public ColumnFamilyDescriptorBuilder 
setBlocksize(int value) {
-413desc.setBlocksize(value);
-414return this;
-415  }
-416
-417  public ColumnFamilyDescriptorBuilder 
setBloomFilterType(final BloomType value) {
-418desc.setBloomFilterType(value);
-419return this;
-420  }
-421
-422  public ColumnFamilyDescriptorBuilder 
setCacheBloomsOnWrite(boolean value) {
-423desc.setCacheBloomsOnWrite(value);
-424return this;
-425  }
-426
-427  public ColumnFamilyDescriptorBuilder 
setCacheDataInL1(boolean value) {
-428desc.setCacheDataInL1(value);
-429return this;
-430  }
-431
-432  public ColumnFamilyDescriptorBuilder 
setCacheDataOnWrite(boolean value) {
-433desc.setCacheDataOnWrite(value);
-434return this;
-435  }
-436
-437  public ColumnFamilyDescriptorBuilder 
setCacheIndexesOnWrite(final boolean value) {
-438desc.setCacheIndexesOnWrite(value);
-439return this;
-440  }
-441
-442  public ColumnFamilyDescriptorBuilder 
setCompactionCompressionType(Compression.Algorithm value) {
-443
desc.setCompactionCompressionType(value);
-444return this;
-445  }
-446
-447  public ColumnFamilyDescriptorBuilder 
setCompressTags(boolean value) {
-448desc.setCompressTags(value);
-449return this;
-450  }
-451
-452  public ColumnFamilyDescriptorBuilder 
setCompressionType(Compression.Algorithm value) {
-453desc.setCompressionType(value);
-454return this;
-455  }
-456
-457  public ColumnFamilyDescriptorBuilder 
setConfiguration(final String key, final String value) {
-458desc.setConfiguration(key, value);
-459return this;
-460  }
-461
-462  public ColumnFamilyDescriptorBuilder 
setDFSReplication(short value) {
-463desc.setDFSReplication(value);
-464return this;
-465  }
-466
-467  public ColumnFamilyDescriptorBuilder 
setDataBlockEncoding(DataBlockEncoding value) {
-468desc.setDataBlockEncoding(value);
-469return this;
-470  }
-471
-472  public ColumnFamilyDescriptorBuilder 
setEncryptionKey(final byte[] value) {
-473desc.setEncryptionKey(value);
-474return this;
-475  }
-476
-477  public ColumnFamilyDescriptorBuilder 
setEncryptionType(String value) {
-478desc.setEncryptionType(value);
-479return this;
-480  }
-481
-482  public ColumnFamilyDescriptorBuilder 
setEvictBlocksOnClose(boolean value) {
-483desc.setEvictBlocksOnClose(value);
-484return this;
-485  }
-486
-487  public ColumnFamilyDescriptorBuilder 
setInMemory(final boolean value) {
-488desc.setInMemory(value);
-489return this;
-490  }
-491
-492  public ColumnFamilyDescriptorBuilder 
setInMemoryCompaction(final MemoryCompactionPolicy value) {
-493desc.setInMemoryCompaction(value);
-494return this;
-495  }
-496
-497  public ColumnFamilyDescriptorBuilder 
setKeepDeletedCells(KeepDeletedCells value) {
-498desc.setKeepDeletedCells(value);
-499return this;
-
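 For orientation, the builder shown above is used fluently; a short sketch, including the of(...) shortcuts this change introduces: import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.util.Bytes; class ColumnFamilySketch { static ColumnFamilyDescriptor tuned() { // Each setter delegates to the internal ModifyableColumnFamilyDescriptor // and returns the builder for chaining. return ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("info")) .setBlocksize(64 * 1024) .setBloomFilterType(BloomType.ROW) .setInMemory(true) .build(); } // of(...) is the new shortcut for an all-defaults descriptor. static ColumnFamilyDescriptor defaults() { return ColumnFamilyDescriptorBuilder.of("info"); } } 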

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-21 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca5b0275/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
index 0c07a2f..c90d203 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html
@@ -34,553 +34,554 @@
 026import static 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
 027import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 028
-029import 
com.google.common.annotations.VisibleForTesting;
-030
-031import io.netty.buffer.ByteBuf;
-032import 
io.netty.buffer.ByteBufAllocator;
-033import io.netty.channel.Channel;
-034import 
io.netty.channel.ChannelHandler.Sharable;
-035import 
io.netty.channel.ChannelHandlerContext;
-036import io.netty.channel.EventLoop;
-037import 
io.netty.channel.SimpleChannelInboundHandler;
-038import 
io.netty.handler.codec.protobuf.ProtobufDecoder;
-039import 
io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-040import 
io.netty.handler.timeout.IdleStateEvent;
-041import 
io.netty.handler.timeout.IdleStateHandler;
-042import io.netty.util.concurrent.Future;
-043import 
io.netty.util.concurrent.Promise;
-044import 
io.netty.util.concurrent.PromiseCombiner;
-045
-046import java.io.IOException;
-047import java.nio.ByteBuffer;
-048import java.util.ArrayDeque;
-049import java.util.Collection;
-050import java.util.Collections;
-051import java.util.Deque;
-052import java.util.IdentityHashMap;
-053import java.util.List;
-054import java.util.Set;
-055import 
java.util.concurrent.CompletableFuture;
-056import java.util.concurrent.TimeUnit;
-057import java.util.function.Supplier;
-058
-059import 
org.apache.hadoop.conf.Configuration;
-060import 
org.apache.hadoop.crypto.Encryptor;
-061import org.apache.hadoop.fs.Path;
-062import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-063import 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose;
-064import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-065import 
org.apache.hadoop.hbase.util.FSUtils;
-066import 
org.apache.hadoop.hdfs.DFSClient;
-067import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-068import 
org.apache.hadoop.hdfs.protocol.ClientProtocol;
-069import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-070import 
org.apache.hadoop.hdfs.protocol.LocatedBlock;
-071import 
org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
-072import 
org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-073import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-074import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-075import 
org.apache.hadoop.util.DataChecksum;
-076
-077/**
-078 * An asynchronous HDFS output stream 
implementation which fans out data to datanode and only
-079 * supports writing file with only one 
block.
 -080 *

                              -081 * Use the createOutput method in {@link FanOutOneBlockAsyncDFSOutputHelper} to create. The mainly -082 * usage of this class is implementing WAL, so we only expose a little HDFS configurations in the -083 * method. And we place it here under util package because we want to make it independent of WAL -084 * implementation thus easier to move it to HDFS project finally. -085 *

                              -086 * Note that, all connections to datanode will run in the same {@link EventLoop} which means we only -087 * need one thread here. But be careful, we do some blocking operations in {@link #close()} and -088 * {@link #recoverAndClose(CancelableProgressable)} methods, so do not call them inside -089 * {@link EventLoop}. And for {@link #write(byte[])} {@link #write(byte[], int, int)}, -090 * {@link #buffered()} and {@link #flush(boolean)}, if you call them outside {@link EventLoop}, -091 * there will be an extra context-switch. -092 *

                              -093 * Advantages compare to DFSOutputStream: -094 *

                                -095 *
                              1. The fan out mechanism. This will reduce the latency.
                              2. -096 *
                              3. The asynchronous WAL could also run in the same EventLoop, we could just call write and flush -097 * inside the EventLoop thread, so generally we only have one thread to do all the things.
                              4. -098 *
                              5. Fail-fast when connection to datanode error. The WAL implementation could open new writer -099 * ASAP.
                              6. -100 *
                              7. We could benefit from netty's ByteBuf management mechanism.
                              8. -101 *
                              -102 */ -103@InterfaceAudience.Private -104public class FanOutOneBlockAsyncDFSOutput implements AsyncFSOutput { -105 -106 // The MAX_PACKET_SIZE is 16MB but it include the header 
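 A hedged usage sketch of the createOutput entry point the javadoc names (the parameter list follows the 2017-era helper and may differ by version; the blocking close() stays outside the EventLoop, as the javadoc warns): import io.netty.channel.EventLoop; import io.netty.channel.nio.NioEventLoopGroup; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hdfs.DistributedFileSystem; class FanOutUsageSketch { static void writeOneBlockFile(DistributedFileSystem dfs) throws Exception { EventLoop loop = new NioEventLoopGroup(1).next(); FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput( dfs, new Path("/demo/wal.0001"), true /* overwrite */, false /* createParent */, (short) 3 /* replication */, 64L * 1024 * 1024 /* blockSize */, loop); out.write(Bytes.toBytes("wal entry")); // flush is asynchronous; the future completes in the EventLoop thread. out.flush(false).whenComplete((len, err) -> { /* ack from datanodes */ }); out.close(); // blocking: must not be called inside the EventLoop } } 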

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9eba7fcf/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.StdOutSink.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.StdOutSink.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.StdOutSink.html
index e1fbce4..873e17f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.StdOutSink.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tool/Canary.StdOutSink.html
@@ -1089,497 +1089,498 @@
 1081}
 1082  }
 1083 Map actualReadTableLatency = regionSink.getReadLatencyMap(); 
-1084  for (String tableName : 
this.configuredReadTableTimeouts.keySet()) {
-1085if 
(actualReadTableLatency.containsKey(tableName)) {
-1086  Long actual = 
actualReadTableLatency.get(tableName).longValue();
-1087  Long configured = 
this.configuredReadTableTimeouts.get(tableName);
-1088  LOG.info("Read operation 
for " + tableName + " took " + actual +
-1089" ms. The configured 
read timeout was " + configured + " ms.");
-1090  if (actual > 
configured) {
-1091LOG.error("Read 
operation for " + tableName + " exceeded the configured read timeout.");
-1092  }
-1093} else {
-1094  LOG.error("Read operation 
for " + tableName + " failed!");
-1095}
-1096  }
-1097  if (this.writeSniffing) {
-1098String writeTableStringName 
= this.writeTableName.getNameAsString();
-1099long actualWriteLatency = 
regionSink.getWriteLatency().longValue();
-1100LOG.info("Write operation 
for " + writeTableStringName + " took " + actualWriteLatency + " ms. The 
configured write timeout was " +
-1101  
this.configuredWriteTableTimeout + " ms.");
-1102// Check that the writeTable 
write operation latency does not exceed the configured timeout.
-1103if (actualWriteLatency > 
this.configuredWriteTableTimeout) {
-1104  LOG.error("Write operation 
for " + writeTableStringName + " exceeded the configured write timeout.");
-1105}
-1106  }
-1107} catch (Exception e) {
-1108  LOG.error("Run regionMonitor 
failed", e);
-1109  this.errorCode = 
ERROR_EXIT_CODE;
-1110}
-  }
-1112  this.done = true;
-1113}
-1114
-1115private String[] 
generateMonitorTables(String[] monitorTargets) throws IOException {
-1116  String[] returnTables = null;
-1117
-1118  if (this.useRegExp) {
-1119Pattern pattern = null;
-1120HTableDescriptor[] tds = null;
 -1121Set tmpTables = new TreeSet<>(); 
-1122try {
-1123  if (LOG.isDebugEnabled()) {
-1124
LOG.debug(String.format("reading list of tables"));
-1125  }
-1126  tds = 
this.admin.listTables(pattern);
-1127  if (tds == null) {
-1128tds = new 
HTableDescriptor[0];
-1129  }
-1130  for (String monitorTarget : 
monitorTargets) {
-1131pattern = 
Pattern.compile(monitorTarget);
-1132for (HTableDescriptor td : 
tds) {
-1133  if 
(pattern.matcher(td.getNameAsString()).matches()) {
-1134
tmpTables.add(td.getNameAsString());
-1135  }
-1136}
-1137  }
-1138} catch (IOException e) {
-1139  LOG.error("Communicate with 
admin failed", e);
-1140  throw e;
-1141}
-1142
-1143if (tmpTables.size() > 0) {
-1144  returnTables = 
tmpTables.toArray(new String[tmpTables.size()]);
-1145} else {
-1146  String msg = "No HTable found, 
tablePattern:" + Arrays.toString(monitorTargets);
-1147  LOG.error(msg);
-1148  this.errorCode = 
INIT_ERROR_EXIT_CODE;
-1149  throw new 
TableNotFoundException(msg);
-1150}
-1151  } else {
-1152returnTables = monitorTargets;
-1153  }
-1154
-1155  return returnTables;
-1156}
-1157
-1158/*
-1159 * canary entry point to monitor all 
the tables.
-1160 */
 -1161private List> sniff(TaskType taskType, RegionStdOutSink regionSink) throws Exception { 
-1162  if (LOG.isDebugEnabled()) {
-1163LOG.debug(String.format("reading 
list of tables"));
-1164  }
 -1165 List> taskFutures = new LinkedList<>(); 
-1166  for (HTableDescriptor table : 
admin.listTables()) {
-1167if 
(admin.isTableEnabled(table.getTableName())
-1168&& 
(!table.getTableName().equals(writeTableName))) {
-1169  AtomicLong readLatency = 
regionSink.initializeAndGetReadLatencyForTable(table.getNameAsString());
-1170  
taskFutures.addAll(Canary.sniff(admin, sink, table, executor, taskType, 
this.rawScanEnabled, readLatency));
-117
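 The timeout bookkeeping above reduces to one comparison per configured table; an illustrative restatement (not Canary source): import java.util.Map; static void checkReadLatencies(Map actualMs, Map configuredMs) { for (Map.Entry e : configuredMs.entrySet()) { Long observed = actualMs.get(e.getKey()); if (observed == null) { // no latency recorded means the read itself failed System.err.println("Read operation for " + e.getKey() + " failed!"); } else if (observed > e.getValue()) { System.err.println("Read operation for " + e.getKey() + " took " + observed + " ms, exceeding the configured " + e.getValue() + " ms timeout."); } } } 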

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/17128d27/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
index eb9099e..35d5549 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.AddColumnFamilyProcedureBiConsumer.html
@@ -232,2671 +232,2699 @@
 224import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse;
 225import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest;
 226import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse;
-227import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest;
-228import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse;
-229import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest;
-230import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse;
-231import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest;
-232import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse;
-233import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest;
-234import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse;
-235import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest;
-236import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse;
-237import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest;
-238import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse;
-239import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest;
-240import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse;
-241import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest;
-242import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse;
-243import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest;
-244import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse;
-245import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest;
-246import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse;
-247import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
-248import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-249import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-250import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-251import 
org.apache.hadoop.hbase.util.Bytes;
-252import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-253import 
org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-254import 
org.apache.hadoop.hbase.util.Pair;
-255
-256/**
-257 * The implementation of AsyncAdmin.
-258 */
-259@InterfaceAudience.Private
-260public class RawAsyncHBaseAdmin 
implements AsyncAdmin {
-261  public static final String 
FLUSH_TABLE_PROCEDURE_SIGNATURE = "flush-table-proc";
-262
-263  private static final Log LOG = 
LogFactory.getLog(AsyncHBaseAdmin.class);
+227import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest;
+228import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse;
+229import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest;
+230import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse;
+231import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest;
+232import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse;
+233import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest;
+234import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse;
+235import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest;
+236import 
org.apache.hadoop.hbase.shaded.protobuf.generated.Replicati

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2777c693/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyMemStoreCompactor.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyMemStoreCompactor.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyMemStoreCompactor.html
index 4196a6c..6c65fd1 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyMemStoreCompactor.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestStore.MyMemStoreCompactor.html
@@ -114,1476 +114,1636 @@
 106import org.mockito.Mockito;
 107
 108import com.google.common.collect.Lists;
-109
-110/**
-111 * Test class for the Store
-112 */
-113@Category({RegionServerTests.class, 
MediumTests.class})
-114public class TestStore {
-115  private static final Log LOG = 
LogFactory.getLog(TestStore.class);
-116  @Rule public TestName name = new 
TestName();
-117
-118  HStore store;
-119  byte [] table = 
Bytes.toBytes("table");
-120  byte [] family = 
Bytes.toBytes("family");
+109import 
org.apache.hadoop.hbase.filter.Filter;
+110import 
org.apache.hadoop.hbase.filter.FilterBase;
+111import static 
org.junit.Assert.assertEquals;
+112import static 
org.junit.Assert.assertTrue;
+113
+114/**
+115 * Test class for the Store
+116 */
+117@Category({RegionServerTests.class, 
MediumTests.class})
+118public class TestStore {
+119  private static final Log LOG = 
LogFactory.getLog(TestStore.class);
+120  @Rule public TestName name = new 
TestName();
 121
-122  byte [] row = Bytes.toBytes("row");
-123  byte [] row2 = Bytes.toBytes("row2");
-124  byte [] qf1 = Bytes.toBytes("qf1");
-125  byte [] qf2 = Bytes.toBytes("qf2");
-126  byte [] qf3 = Bytes.toBytes("qf3");
-127  byte [] qf4 = Bytes.toBytes("qf4");
-128  byte [] qf5 = Bytes.toBytes("qf5");
-129  byte [] qf6 = Bytes.toBytes("qf6");
-130
 -131 NavigableSet qualifiers = new ConcurrentSkipListSet<>(Bytes.BYTES_COMPARATOR); -132 -133 List expected = new ArrayList<>(); -134 List result = new ArrayList<>(); 
-135
-136  long id = System.currentTimeMillis();
-137  Get get = new Get(row);
-138
-139  private HBaseTestingUtility TEST_UTIL = 
new HBaseTestingUtility();
-140  private final String DIR = 
TEST_UTIL.getDataTestDir("TestStore").toString();
-141
+122  HStore store;
+123  byte [] table = 
Bytes.toBytes("table");
+124  byte [] family = 
Bytes.toBytes("family");
+125
+126  byte [] row = Bytes.toBytes("row");
+127  byte [] row2 = Bytes.toBytes("row2");
+128  byte [] qf1 = Bytes.toBytes("qf1");
+129  byte [] qf2 = Bytes.toBytes("qf2");
+130  byte [] qf3 = Bytes.toBytes("qf3");
+131  byte [] qf4 = Bytes.toBytes("qf4");
+132  byte [] qf5 = Bytes.toBytes("qf5");
+133  byte [] qf6 = Bytes.toBytes("qf6");
+134
 +135 NavigableSet qualifiers = new ConcurrentSkipListSet<>(Bytes.BYTES_COMPARATOR); +136 +137 List expected = new ArrayList<>(); +138 List result = new ArrayList<>(); 
+139
+140  long id = System.currentTimeMillis();
+141  Get get = new Get(row);
 142
-143  /**
-144   * Setup
-145   * @throws IOException
-146   */
-147  @Before
-148  public void setUp() throws IOException 
{
-149qualifiers.add(qf1);
-150qualifiers.add(qf3);
-151qualifiers.add(qf5);
-152
 -153Iterator iter = qualifiers.iterator(); 
-154while(iter.hasNext()){
-155  byte [] next = iter.next();
-156  expected.add(new KeyValue(row, 
family, next, 1, (byte[])null));
-157  get.addColumn(family, next);
-158}
-159  }
-160
-161  private void init(String methodName) 
throws IOException {
-162init(methodName, 
TEST_UTIL.getConfiguration());
+143  private HBaseTestingUtility TEST_UTIL = 
new HBaseTestingUtility();
+144  private final String DIR = 
TEST_UTIL.getDataTestDir("TestStore").toString();
+145
+146
+147  /**
+148   * Setup
+149   * @throws IOException
+150   */
+151  @Before
+152  public void setUp() throws IOException 
{
+153qualifiers.add(qf1);
+154qualifiers.add(qf3);
+155qualifiers.add(qf5);
+156
 +157Iterator iter = qualifiers.iterator(); 
+158while(iter.hasNext()){
+159  byte [] next = iter.next();
+160  expected.add(new KeyValue(row, 
family, next, 1, (byte[])null));
+161  get.addColumn(family, next);
+162}
 163  }
 164
-165  private Store init(String methodName, 
Configuration conf)
-166  throws IOException {
-167HColumnDescriptor hcd = new 
HColumnDescriptor(family);
-168// some of the tests write 4 versions 
and then flush
-169// (with HBASE-4241, lower versions 
are collected on flush)
-170hcd.setMaxVersions(4);
-171return init(methodName, conf, hcd);
-172  }
-173
-174  private Store init(String methodName, 
Configuration conf,
-175  HColumnDescriptor hcd) throws 
IOException {
-176HTableDescriptor htd = new 
HTableDescriptor(TableName.valueOf(table))
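 The HBASE-4241 note in the comment above is why these tests cap versions at four: on flush, cell versions beyond the configured maximum are collected. A minimal sketch of that setup (illustrative): import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.util.Bytes; class MaxVersionsSketch { static HColumnDescriptor fourVersions() { HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes("family")); hcd.setMaxVersions(4); // excess versions are collected at flush (HBASE-4241) return hcd; } } 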

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-11 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/90c7dfe4/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
index 25d6b70..635798d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionOperation.html
@@ -1234,540 +1234,541 @@
 1226   * @param result the result returned by the append
 1227   * @throws IOException if an error occurred on the coprocessor
 1228   */
-1229  public void postAppend(final Append append, final Result result) throws IOException {
-1230    execOperation(coprocessors.isEmpty() ? null : new RegionOperation() {
-1231      @Override
-1232      public void call(RegionObserver oserver, ObserverContext ctx)
-1233          throws IOException {
-1234        oserver.postAppend(ctx, append, result);
-1235      }
-1236    });
-1237  }
-1238
-1239  /**
-1240   * @param increment increment object
-1241   * @param result the result returned by postIncrement
-1242   * @throws IOException if an error occurred on the coprocessor
-1243   */
-1244  public Result postIncrement(final Increment increment, Result result) throws IOException {
-1245    return execOperationWithResult(result,
-1246        coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1247      @Override
-1248      public void call(RegionObserver oserver, ObserverContext ctx)
-1249          throws IOException {
-1250        setResult(oserver.postIncrement(ctx, increment, getResult()));
-1251      }
-1252    });
-1253  }
-1254
-1255  /**
-1256   * @param scan the Scan specification
-1257   * @return scanner id to return to client if default operation should be
-1258   * bypassed, null otherwise
-1259   * @exception IOException Exception
-1260   */
-1261  public RegionScanner preScannerOpen(final Scan scan) throws IOException {
-1262    return execOperationWithResult(true, null,
-1263        coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1264      @Override
-1265      public void call(RegionObserver oserver, ObserverContext ctx)
-1266          throws IOException {
-1267        setResult(oserver.preScannerOpen(ctx, scan, getResult()));
-1268      }
-1269    });
-1270  }
-1271
-1272  /**
-1273   * See
-1274   * {@link RegionObserver#preStoreScannerOpen(ObserverContext,
-1275   *    Store, Scan, NavigableSet, KeyValueScanner)}
-1276   */
-1277  public KeyValueScanner preStoreScannerOpen(final Store store, final Scan scan,
-1278      final NavigableSet targetCols, final long readPt) throws IOException {
-1279    return execOperationWithResult(null,
-1280        coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1281      @Override
-1282      public void call(RegionObserver oserver, ObserverContext ctx)
-1283          throws IOException {
-1284        setResult(oserver.preStoreScannerOpen(ctx, store, scan, targetCols, getResult(), readPt));
-1285      }
-1286    });
-1287  }
-1288
-1289  /**
-1290   * @param scan the Scan specification
-1291   * @param s the scanner
-1292   * @return the scanner instance to use
-1293   * @exception IOException Exception
-1294   */
-1295  public RegionScanner postScannerOpen(final Scan scan, RegionScanner s) throws IOException {
-1296    return execOperationWithResult(s,
-1297        coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1298      @Override
-1299      public void call(RegionObserver oserver, ObserverContext ctx)
-1300          throws IOException {
-1301        setResult(oserver.postScannerOpen(ctx, scan, getResult()));
-1302      }
-1303    });
-1304  }
-1305
-1306  /**
-1307   * @param s the scanner
-1308   * @param results the result set returned by the region server
-1309   * @param limit the maximum number of results to return
-1310   * @return 'has next' indication to client if bypassing default behavior, or
-1311   * null otherwise
-1312   * @exception IOException Exception
-1313   */
-1314  public Boolean preScannerNext(final InternalScanner s,
-1315      final List results, final int limit) throws IOException {
-1316    return execOperationWithResult(true, false,
-1317        coprocessors.isEmpty() ? null : new RegionOperationWithResult() {
-1318      @Override
-1319      public void call(RegionObserver oserver, ObserverContext ctx)
-1320
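The RegionCoprocessorHost hunk above repeats one dispatch pattern: each hook wraps its event in a RegionOperation callback, and execOperation runs that callback against every registered RegionObserver, passing null (a no-op) when no coprocessors are loaded. Below is a self-contained sketch of that observer-host pattern; the Observer and Operation types are simplified stand-ins, not HBase's real coprocessor classes:

    import java.io.IOException;
    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;

    public class ObserverHost {

      // Stand-in for RegionObserver: one hook for demonstration.
      public interface Observer {
        void postAppend(String row) throws IOException;
      }

      // Mirrors RegionOperation: one callback invoked per registered observer.
      public interface Operation {
        void call(Observer observer) throws IOException;
      }

      private final List<Observer> observers = new CopyOnWriteArrayList<>();

      public void register(Observer o) {
        observers.add(o);
      }

      // Mirrors execOperation: a null operation (no observers) is a cheap no-op.
      private void execOperation(Operation op) throws IOException {
        if (op == null) {
          return;
        }
        for (Observer o : observers) {
          op.call(o);
        }
      }

      // Mirrors the postAppend hook in the hunk: wrap the event in an Operation.
      public void postAppend(final String row) throws IOException {
        execOperation(observers.isEmpty() ? null : new Operation() {
          @Override
          public void call(Observer observer) throws IOException {
            observer.postAppend(row);
          }
        });
      }

      public static void main(String[] args) throws IOException {
        ObserverHost host = new ObserverHost();
        host.register(row -> System.out.println("postAppend for row " + row));
        host.postAppend("row-1");
      }
    }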

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/0821e51a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
index f729c99..0a32350 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
@@ -54,757 +54,756 @@
 046import io.netty.channel.ChannelPipeline;
 047import io.netty.channel.EventLoop;
 048import io.netty.channel.SimpleChannelInboundHandler;
-049import io.netty.channel.socket.nio.NioSocketChannel;
-050import io.netty.handler.codec.protobuf.ProtobufDecoder;
-051import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-052import io.netty.handler.timeout.IdleStateEvent;
-053import io.netty.handler.timeout.IdleStateHandler;
-054import io.netty.util.concurrent.Future;
-055import io.netty.util.concurrent.FutureListener;
-056import io.netty.util.concurrent.Promise;
-057
-058import java.io.IOException;
-059import java.lang.reflect.InvocationTargetException;
-060import java.lang.reflect.Method;
-061import java.util.ArrayList;
-062import java.util.EnumSet;
-063import java.util.List;
-064import java.util.concurrent.TimeUnit;
-065
-066import org.apache.commons.logging.Log;
-067import org.apache.commons.logging.LogFactory;
-068import org.apache.hadoop.conf.Configuration;
-069import org.apache.hadoop.crypto.CryptoProtocolVersion;
-070import org.apache.hadoop.crypto.Encryptor;
-071import org.apache.hadoop.fs.CreateFlag;
-072import org.apache.hadoop.fs.FileSystem;
-073import org.apache.hadoop.fs.FileSystemLinkResolver;
-074import org.apache.hadoop.fs.Path;
-075import org.apache.hadoop.fs.UnresolvedLinkException;
-076import org.apache.hadoop.fs.permission.FsPermission;
-077import org.apache.hadoop.hbase.classification.InterfaceAudience;
-078import org.apache.hadoop.hbase.client.ConnectionUtils;
-079import org.apache.hadoop.hbase.util.CancelableProgressable;
-080import org.apache.hadoop.hbase.util.FSUtils;
-081import org.apache.hadoop.hdfs.DFSClient;
-082import org.apache.hadoop.hdfs.DFSOutputStream;
-083import org.apache.hadoop.hdfs.DistributedFileSystem;
-084import org.apache.hadoop.hdfs.protocol.ClientProtocol;
-085import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-086import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
-087import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
-088import org.apache.hadoop.hdfs.protocol.LocatedBlock;
-089import org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;
-090import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;
-091import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
-092import org.apache.hadoop.hdfs.protocol.datatransfer.Op;
-093import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
-094import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;
-095import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
-096import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;
-097import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;
-098import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;
-099import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;
-100import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
-101import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-102import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
-103import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;
-104import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-105import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
-106import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
-107import org.apache.hadoop.io.EnumSetWritable;
-108import org.apache.hadoop.ipc.RemoteException;
-109import org.apache.hadoop.net.NetUtils;
-110import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
-111import org.apache.hadoop.security.token.Token;
-112import org.apache.hadoop.util.DataChecksum;
-113
-114/**
-115 * Helper class for implementing {@link FanOutOneBlockAsyncDFSOutput}.
-116 */
-117@InterfaceAudience.Private
-118public final class FanOutOneBlockAsyncDFSOutputHelper {
+049import io.netty.handler.codec.protobuf.ProtobufDecoder;
+050import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+051import

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2d27954a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
index 16c0042..71844ce 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
@@ -126,2499 +126,2543 @@
 118import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
 119import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
 120import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-121import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-122import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-123import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-124import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-125import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-126import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-127import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-128import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-129import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-130import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-131import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-132import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-133import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-134import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-135import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-136import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-137import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-138import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
-139import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;
-140import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse;
-141import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-142import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-143import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
-144import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
-145import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;
-146import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;
-147import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest;
-148import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse;
-149import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest;
-150import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse;
-151import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest;
-152import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse;
-153import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest;
-154import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse;
-155import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest;
-156import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse;
-157import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
-158import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
-159import org.apache.hadoop.hbase.shaded.protobuf.generated.Ma

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/Table.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Table.html b/devapidocs/org/apache/hadoop/hbase/client/Table.html
index a76bcee..201b071 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Table.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Table.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":38,"i25":6,"i26":6,"i27":6,"i28":6,"i29":38,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":38,"i38":38,"i39":38,"i40":38,"i41":38};
+var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":38,"i26":6,"i27":6,"i28":6,"i29":38,"i30":38,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":38,"i39":38,"i40":38,"i41":38,"i42":38};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -327,24 +327,30 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
+TableDescriptor
+getDescriptor()
+Gets the table descriptor for this table.
+
+
+
 TableName
 getName()
 Gets the fully qualified table name instance of this table.
 
 
-
+
 int
 getOperationTimeout()
 Get timeout (millisecond) of each operation for in Table instance.
 
 
-
+
 int
 getReadRpcTimeout()
 Get timeout (millisecond) of each rpc read request in this Table instance.
 
 
-
+
 int
 getRpcTimeout()
 Deprecated. 
@@ -352,33 +358,36 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 ResultScanner
 getScanner(byte[] family)
 Gets a scanner on the current table for the given family.
 
 
-
+
 ResultScanner
 getScanner(byte[] family,
   byte[] qualifier)
 Gets a scanner on the current table for the given family and qualifier.
 
 
-
+
 ResultScanner
 getScanner(Scan scan)
 Returns a scanner on the current table as specified by the Scan object.
 
 
-
+
 HTableDescriptor
 getTableDescriptor()
-Gets the table descriptor for this table.
+Deprecated. 
+since 2.0 version and will be removed in 3.0 version.
+ use getDescriptor()
+
 
 
-
+
 long
 getWriteBufferSize()
 Deprecated. 
@@ -386,19 +395,19 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
-
+
 int
 getWriteRpcTimeout()
 Get timeout (millisecond) of each rpc write request in this Table instance.
 
 
-
+
 Result
 increment(Increment increment)
 Increments one or more columns within a single row.
 
 
-
+
 long
 incrementColumnValue(byte[] row,
 byte[] family,
@@ -407,7 +416,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 See incrementColumnValue(byte[], byte[], byte[], long, Durability)
 
 
-
+
 long
 incrementColumnValue(byte[] row,
 byte[] family,
@@ -417,25 +426,25 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 Atomically increments a column value.
 
 
-
+
 void
 mutateRow(RowMutations rm)
 Performs multiple mutations atomically on a single row.
 
 
-
+
 void
 put(List puts)
 Puts some data in the table, in batch.
 
 
-
+
 void
 put(Put put)
 Puts some data in the table.
 
 
-
+
 void
 setOperationTimeout(int operationTimeout)
 Deprecated. 
@@ -443,7 +452,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 void
 setReadRpcTimeout(int readRpcTimeout)
 Deprecated. 
@@ -451,7 +460,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 void
 setRpcTimeout(int rpcTimeout)
 Deprecated. 
@@ -459,7 +468,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 void
 setWriteBufferSize(long writeBufferSize)
 Deprecated. 
@@ -468,7 +477,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 void
 setWriteRpcTimeout(int writeRpcTimeout)
 Deprecated. 
@@ -520,8 +529,11 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 getTableDescriptor
-HTableDescriptor getTableDescriptor()
- throws IOException
+@Deprecated
+HTableDescriptor getTableDe
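The Table.html hunk above documents an API migration in HBase 2.0: Table.getTableDescriptor() is deprecated (slated for removal in 3.0) in favor of getDescriptor(), which returns the newer TableDescriptor type. A minimal client-side sketch of the switch; the table name and configuration below are placeholders:

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.client.TableDescriptor;

    public class DescriptorMigration {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = conn.getTable(TableName.valueOf("my_table"))) {
          // Pre-2.0 style, deprecated per the diff above:
          //   HTableDescriptor htd = table.getTableDescriptor();
          // 2.0+ replacement shown in the added summary row:
          TableDescriptor td = table.getDescriptor();
          System.out.println("Table: " + td.getTableName());
        }
      }
    }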

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b3b50f22/hbase-archetypes/hbase-shaded-client-project/source-repository.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/source-repository.html b/hbase-archetypes/hbase-shaded-client-project/source-repository.html
index a96f5db..26d17b2 100644
--- a/hbase-archetypes/hbase-shaded-client-project/source-repository.html
+++ b/hbase-archetypes/hbase-shaded-client-project/source-repository.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-05
+Last Published: 2017-07-07
    | Version: 3.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b3b50f22/hbase-archetypes/hbase-shaded-client-project/team-list.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/team-list.html b/hbase-archetypes/hbase-shaded-client-project/team-list.html
index 348a47e..d3bf4a3 100644
--- a/hbase-archetypes/hbase-shaded-client-project/team-list.html
+++ b/hbase-archetypes/hbase-shaded-client-project/team-list.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-05
+Last Published: 2017-07-07
    | Version: 3.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b3b50f22/hbase-archetypes/index.html
--
diff --git a/hbase-archetypes/index.html b/hbase-archetypes/index.html
index 48de553..5198dbb 100644
--- a/hbase-archetypes/index.html
+++ b/hbase-archetypes/index.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-05
+Last Published: 2017-07-07
    | Version: 3.0.0-SNAPSHOT
   
 Apache HBase - Archetypes

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b3b50f22/hbase-archetypes/integration.html
--
diff --git a/hbase-archetypes/integration.html b/hbase-archetypes/integration.html
index 8671c54..ae65c7f 100644
--- a/hbase-archetypes/integration.html
+++ b/hbase-archetypes/integration.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-05
+Last Published: 2017-07-07
    | Version: 3.0.0-SNAPSHOT
   
 Apache HBase - Archetypes

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b3b50f22/hbase-archetypes/issue-tracking.html
--
diff --git a/hbase-archetypes/issue-tracking.html b/hbase-archetypes/issue-tracking.html
index 44b1639..a2ffc39 100644
--- a/hbase-archetypes/issue-tracking.html
+++ b/hbase-archetypes/issue-tracking.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd";>
-
+
 http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-07-05
+Last Published: 2017-07-07
    | Version: 3.0.0-SNAPSHOT
   
 Apache HBase - Archetypes

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b3b50f22/hbase-archetypes/license.html
--
diff --git a/hbase-archetype

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-05 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ca9f6925/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableSegment.Type.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableSegment.Type.html b/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableSegment.Type.html
deleted file mode 100644
index d45d7c9..000
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableSegment.Type.html
+++ /dev/null
@@ -1,347 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd";>
-
-
-
-
-
-ImmutableSegment.Type (Apache HBase 3.0.0-SNAPSHOT API)
-
-
-
-
-
-org.apache.hadoop.hbase.regionserver
-Enum ImmutableSegment.Type
-
-
-
-java.lang.Object
-
-
-java.lang.Enum
-
-
-org.apache.hadoop.hbase.regionserver.ImmutableSegment.Type
-
-
-
-
-
-
-
-
-
-All Implemented Interfaces:
-Serializable, Comparable
-
-
-Enclosing class:
-ImmutableSegment
-
-
-
-public static enum ImmutableSegment.Type
-extends Enum
-Types of ImmutableSegment
-
-
-
-
-
-
-
-
-
-
-
-Enum Constant Summary
-
-Enum Constants 
-
-Enum Constant and Description
-
-
-ARRAY_MAP_BASED 
-
-
-SKIPLIST_MAP_BASED 
-
-
-
-
-
-
-
-
-
-Method Summary
-
-All Methods Static Methods Concrete Methods 
-
-Modifier and Type
-Method and Description
-
-
-static ImmutableSegment.Type
-valueOf(String name)
-Returns the enum constant of this type with the specified name.
-
-
-
-static ImmutableSegment.Type[]
-values()
-Returns an array containing the constants of this enum type, in
-the order they are declared.
-
-
-
-
-
-
-
-Methods inherited from class java.lang.Enum
-clone, compareTo, equals, finalize, getDeclaringClass, hashCode, name, ordinal, toString, valueOf

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8e3b63ca/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ReadRequestCostFunction.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ReadRequestCostFunction.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ReadRequestCostFunction.html
index c895448..545d4da 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ReadRequestCostFunction.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.ReadRequestCostFunction.html
@@ -1294,425 +1294,426 @@
 1286      }
 1287
 1288      // We normalize locality to be a score between 0 and 1.0 representing how good it
-1289      // is compared to how good it could be
-1290      locality /= bestLocality;
-1291    }
-1292
-1293    @Override
-1294    protected void regionMoved(int region, int oldServer, int newServer) {
-1295      int oldEntity = type == LocalityType.SERVER ? oldServer : cluster.serverIndexToRackIndex[oldServer];
-1296      int newEntity = type == LocalityType.SERVER ? newServer : cluster.serverIndexToRackIndex[newServer];
-1297      if (this.services == null) {
-1298        return;
-1299      }
-1300      double localityDelta = getWeightedLocality(region, newEntity) - getWeightedLocality(region, oldEntity);
-1301      double normalizedDelta = localityDelta / bestLocality;
-1302      locality += normalizedDelta;
-1303    }
-1304
-1305    @Override
-1306    double cost() {
-1307      return 1 - locality;
-1308    }
-1309
-1310    private int getMostLocalEntityForRegion(int region) {
-1311      return cluster.getOrComputeRegionsToMostLocalEntities(type)[region];
-1312    }
-1313
-1314    private double getWeightedLocality(int region, int entity) {
-1315      return cluster.getOrComputeWeightedLocality(region, entity, type);
-1316    }
-1317
-1318  }
-1319
-1320  static class ServerLocalityCostFunction extends LocalityBasedCostFunction {
-1321
-1322    private static final String LOCALITY_COST_KEY = "hbase.master.balancer.stochastic.localityCost";
-1323    private static final float DEFAULT_LOCALITY_COST = 25;
-1324
-1325    ServerLocalityCostFunction(Configuration conf, MasterServices srv) {
-1326      super(
-1327          conf,
-1328          srv,
-1329          LocalityType.SERVER,
-1330          LOCALITY_COST_KEY,
-1331          DEFAULT_LOCALITY_COST
-1332      );
-1333    }
-1334
-1335    @Override
-1336    int regionIndexToEntityIndex(int region) {
-1337      return cluster.regionIndexToServerIndex[region];
-1338    }
-1339  }
-1340
-1341  static class RackLocalityCostFunction extends LocalityBasedCostFunction {
-1342
-1343    private static final String RACK_LOCALITY_COST_KEY = "hbase.master.balancer.stochastic.rackLocalityCost";
-1344    private static final float DEFAULT_RACK_LOCALITY_COST = 15;
-1345
-1346    public RackLocalityCostFunction(Configuration conf, MasterServices services) {
-1347      super(
-1348          conf,
-1349          services,
-1350          LocalityType.RACK,
-1351          RACK_LOCALITY_COST_KEY,
-1352          DEFAULT_RACK_LOCALITY_COST
-1353      );
-1354    }
-1355
-1356    @Override
-1357    int regionIndexToEntityIndex(int region) {
-1358      return cluster.getRackForRegion(region);
-1359    }
-1360  }
-1361
-1362  /**
-1363   * Base class the allows writing costs functions from rolling average of some
-1364   * number from RegionLoad.
-1365   */
-1366  abstract static class CostFromRegionLoadFunction extends CostFunction {
-1367
-1368    private ClusterStatus clusterStatus = null;
-1369    private Map> loads = null;
-1370    private double[] stats = null;
-1371    CostFromRegionLoadFunction(Configuration conf) {
-1372      super(conf);
-1373    }
-1374
-1375    void setClusterStatus(ClusterStatus status) {
-1376      this.clusterStatus = status;
-1377    }
-1378
-1379    void setLoads(Map> l) {
-1380      this.loads = l;
-1381    }
-1382
-1383    @Override
-1384    double cost() {
-1385      if (clusterStatus == null || loads == null) {
-1386        return 0;
-1387      }
-1388
-1389      if (stats == null || stats.length != cluster.numServers) {
-1390        stats = new double[cluster.numServers];
-1391      }
-1392
-1393      for (int i =0; i < stats.length; i++) {
-1394        //Cost this server has from RegionLoad
-1395        long cost = 0;
-1396
-1397        // for every region on this server get the rl
-1398        for(int regionIndex:cluster.regionsPerServer[i]) {
-1399          Collection regionLoadList = cluster.regionLoads[regionIndex];
-1400
-1401          // Now if we found a region load get the type of cost that was requested.
-1402          if (re
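The LocalityBasedCostFunction code above keeps a running locality score normalized by bestLocality, so regionMoved() can fold in just the delta of a single move instead of rescanning the whole cluster, and cost() is simply 1 - locality. Below is a toy model of that incremental update; the weighted-locality table and bestLocality value are invented sample data, not the balancer's real state:

    public class LocalityCostSketch {
      // Invented weighted localities per (region, server).
      static final double[][] WEIGHTED_LOCALITY = {
          {0.9, 0.2},   // region 0 on server 0 / server 1
          {0.1, 0.8},   // region 1 on server 0 / server 1
      };
      static double bestLocality = 1.8;  // sum of each region's best possible placement
      static double locality = (0.9 + 0.1) / bestLocality;  // both regions start on server 0

      // Mirrors regionMoved(): add the normalized delta rather than rescanning.
      static void regionMoved(int region, int oldServer, int newServer) {
        double delta = WEIGHTED_LOCALITY[region][newServer] - WEIGHTED_LOCALITY[region][oldServer];
        locality += delta / bestLocality;
      }

      public static void main(String[] args) {
        System.out.printf("cost before: %.3f%n", 1 - locality);  // cost() is 1 - locality
        regionMoved(1, 0, 1);  // move region 1 to its better server
        System.out.printf("cost after:  %.3f%n", 1 - locality);
      }
    }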

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-26 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aecb1286/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MasterRpcCall.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MasterRpcCall.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MasterRpcCall.html
index 01496d6..dc12c09 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MasterRpcCall.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.MasterRpcCall.html
@@ -48,2406 +48,2267 @@
 040
 041import io.netty.util.Timeout;
 042import io.netty.util.TimerTask;
-043import java.util.stream.Stream;
-044import org.apache.commons.io.IOUtils;
-045import org.apache.commons.logging.Log;
-046import org.apache.commons.logging.LogFactory;
-047import org.apache.hadoop.hbase.HRegionInfo;
-048import org.apache.hadoop.hbase.HRegionLocation;
-049import org.apache.hadoop.hbase.MetaTableAccessor;
-050import org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-051import org.apache.hadoop.hbase.NotServingRegionException;
-052import org.apache.hadoop.hbase.ProcedureInfo;
-053import org.apache.hadoop.hbase.RegionLocations;
-054import org.apache.hadoop.hbase.ServerName;
-055import org.apache.hadoop.hbase.NamespaceDescriptor;
-056import org.apache.hadoop.hbase.HConstants;
-057import org.apache.hadoop.hbase.TableExistsException;
-058import org.apache.hadoop.hbase.TableName;
-059import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-060import org.apache.hadoop.hbase.TableNotDisabledException;
-061import org.apache.hadoop.hbase.TableNotEnabledException;
-062import org.apache.hadoop.hbase.TableNotFoundException;
-063import org.apache.hadoop.hbase.UnknownRegionException;
-064import org.apache.hadoop.hbase.classification.InterfaceAudience;
-065import org.apache.hadoop.hbase.classification.InterfaceStability;
-066import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-067import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-068import org.apache.hadoop.hbase.client.Scan.ReadType;
-069import org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-070import org.apache.hadoop.hbase.client.replication.TableCFs;
-071import org.apache.hadoop.hbase.exceptions.DeserializationException;
-072import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-073import org.apache.hadoop.hbase.quotas.QuotaFilter;
-074import org.apache.hadoop.hbase.quotas.QuotaSettings;
-075import org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-076import org.apache.hadoop.hbase.replication.ReplicationException;
-077import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-078import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-079import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-080import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-081import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-082import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-083import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-084import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-085import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-086import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-087import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-088import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-089import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-090import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-091import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-092import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-093import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-094import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-095import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-096import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-097import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-098import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-099import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-100import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-101import org.apache.hadoop.hbase.shaded.protobuf.generated.Mas

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/a719cd00/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
index 6de986f..c895448 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.WriteRequestCostFunction.html
@@ -26,1592 +26,1693 @@
 018package org.apache.hadoop.hbase.master.balancer;
 019
 020import java.util.ArrayDeque;
-021import java.util.Arrays;
-022import java.util.Collection;
-023import java.util.Deque;
-024import java.util.HashMap;
-025import java.util.LinkedList;
-026import java.util.List;
-027import java.util.Map;
-028import java.util.Map.Entry;
-029import java.util.Random;
-030
-031import org.apache.commons.logging.Log;
-032import org.apache.commons.logging.LogFactory;
-033import org.apache.hadoop.conf.Configuration;
-034import org.apache.hadoop.hbase.ClusterStatus;
-035import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-036import org.apache.hadoop.hbase.HConstants;
-037import org.apache.hadoop.hbase.HRegionInfo;
-038import org.apache.hadoop.hbase.RegionLoad;
-039import org.apache.hadoop.hbase.ServerLoad;
-040import org.apache.hadoop.hbase.ServerName;
-041import org.apache.hadoop.hbase.TableName;
-042import org.apache.hadoop.hbase.classification.InterfaceAudience;
-043import org.apache.hadoop.hbase.master.MasterServices;
-044import org.apache.hadoop.hbase.master.RegionPlan;
-045import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action;
-046import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
-047import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.AssignRegionAction;
-048import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.MoveRegionAction;
-049import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.SwapRegionsAction;
-050import org.apache.hadoop.hbase.util.Bytes;
-051import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-052
-053import com.google.common.collect.Lists;
-054
-055/**
-056 * This is a best effort load balancer. Given a Cost function F(C) => x It will
-057 * randomly try and mutate the cluster to Cprime. If F(Cprime) < F(C) then the
-058 * new cluster state becomes the plan. It includes costs functions to compute the cost of:
-059 *
-060 *   - Region Load
-061 *   - Table Load
-062 *   - Data Locality
-063 *   - Memstore Sizes
-064 *   - Storefile Sizes
-065 *
-066 *
-067 *
-068 * Every cost function returns a number between 0 and 1 inclusive; where 0 is the lowest cost
-069 * best solution, and 1 is the highest possible cost and the worst solution. The computed costs are
-070 * scaled by their respective multipliers:
+021import java.util.ArrayList;
+022import java.util.Arrays;
+023import java.util.Collection;
+024import java.util.Collections;
+025import java.util.Deque;
+026import java.util.HashMap;
+027import java.util.LinkedList;
+028import java.util.List;
+029import java.util.Map;
+030import java.util.Map.Entry;
+031import java.util.Random;
+032
+033import org.apache.commons.logging.Log;
+034import org.apache.commons.logging.LogFactory;
+035import org.apache.hadoop.conf.Configuration;
+036import org.apache.hadoop.hbase.ClusterStatus;
+037import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+038import org.apache.hadoop.hbase.HConstants;
+039import org.apache.hadoop.hbase.HRegionInfo;
+040import org.apache.hadoop.hbase.RegionLoad;
+041import org.apache.hadoop.hbase.ServerLoad;
+042import org.apache.hadoop.hbase.ServerName;
+043import org.apache.hadoop.hbase.TableName;
+044import org.apache.hadoop.hbase.classification.InterfaceAudience;
+045import org.apache.hadoop.hbase.master.MasterServices;
+046import org.apache.hadoop.hbase.master.RegionPlan;
+047import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action;
+048import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
+049import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.AssignRegionAction;
+050import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.LocalityType;
+051import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.MoveRegionAction;
+052import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.SwapRegionsAction;
+053import org.apache.hadoop.hbase.util.Bytes;
+054import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+055
+056import com.google.
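The javadoc in this hunk summarizes the StochasticLoadBalancer's core loop: randomly mutate the cluster state C into C', recompute the weighted sum of [0,1] cost functions, and adopt C' only when F(C') < F(C). A stripped-down sketch of that accept-if-cheaper loop follows; the single cost function and toy cluster state are placeholders, not the balancer's real ones:

    import java.util.Random;
    import java.util.function.ToDoubleFunction;

    public class BestEffortBalancerSketch {
      public static void main(String[] args) {
        Random rng = new Random(42);
        int[] regionToServer = {0, 0, 0, 1};  // toy cluster: 4 regions, 2 servers
        int servers = 2;

        // One placeholder cost function: server load skew, scaled to [0,1].
        ToDoubleFunction<int[]> loadCost = state -> {
          int[] counts = new int[servers];
          for (int s : state) counts[s]++;
          return Math.abs(counts[0] - counts[1]) / (double) state.length;
        };
        double multiplier = 100;  // costs are scaled by per-function multipliers

        double cost = multiplier * loadCost.applyAsDouble(regionToServer);
        for (int step = 0; step < 1000; step++) {
          int region = rng.nextInt(regionToServer.length);
          int oldServer = regionToServer[region];
          regionToServer[region] = rng.nextInt(servers);  // mutate C into C'
          double newCost = multiplier * loadCost.applyAsDouble(regionToServer);
          if (newCost < cost) {
            cost = newCost;                      // F(C') < F(C): the mutation becomes the plan
          } else {
            regionToServer[region] = oldServer;  // otherwise roll the mutation back
          }
        }
        System.out.println("final cost: " + cost);
      }
    }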

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-10 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/476c54ed/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipperThread.WorkerState.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipperThread.WorkerState.html b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipperThread.WorkerState.html
new file mode 100644
index 000..20bb545
--- /dev/null
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipperThread.WorkerState.html
@@ -0,0 +1,431 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+Source code
+
+
+
+
+001/*
+002 *
+003 * Licensed to the Apache Software Foundation (ASF) under one
+004 * or more contributor license agreements.  See the NOTICE file
+005 * distributed with this work for additional information
+006 * regarding copyright ownership.  The ASF licenses this file
+007 * to you under the Apache License, Version 2.0 (the
+008 * "License"); you may not use this file except in compliance
+009 * with the License.  You may obtain a copy of the License at
+010 *
+011 * http://www.apache.org/licenses/LICENSE-2.0
+012 *
+013 * Unless required by applicable law or agreed to in writing, software
+014 * distributed under the License is distributed on an "AS IS" BASIS,
+015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+016 * See the License for the specific language governing permissions and
+017 * limitations under the License.
+018 */
+019package org.apache.hadoop.hbase.replication.regionserver;
+020
+021import java.io.IOException;
+022import java.util.List;
+023import java.util.Map;
+024import java.util.concurrent.PriorityBlockingQueue;
+025import java.util.concurrent.TimeUnit;
+026
+027import org.apache.commons.logging.Log;
+028import org.apache.commons.logging.LogFactory;
+029import org.apache.hadoop.conf.Configuration;
+030import org.apache.hadoop.fs.Path;
+031import org.apache.hadoop.hbase.Cell;
+032import org.apache.hadoop.hbase.CellUtil;
+033import org.apache.hadoop.hbase.MetaTableAccessor;
+034import org.apache.hadoop.hbase.classification.InterfaceAudience;
+035import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+036import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
+037import org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceWALReaderThread.WALEntryBatch;
+038import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor;
+039import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
+040import org.apache.hadoop.hbase.util.Bytes;
+041import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+042import org.apache.hadoop.hbase.util.Threads;
+043import org.apache.hadoop.hbase.wal.WAL.Entry;
+044
+045import com.google.common.cache.CacheBuilder;
+046import com.google.common.cache.CacheLoader;
+047import com.google.common.cache.LoadingCache;
+048
+049/**
+050 * This thread reads entries from a queue and ships them. Entries are placed onto the queue by
+051 * ReplicationSourceWALReaderThread
+052 */
+053@InterfaceAudience.Private
+054public class ReplicationSourceShipperThread extends Thread {
+055  private static final Log LOG = LogFactory.getLog(ReplicationSourceShipperThread.class);
+056
+057  // Hold the state of a replication worker thread
+058  public enum WorkerState {
+059    RUNNING,
+060    STOPPED,
+061    FINISHED,  // The worker is done processing a recovered queue
+062  }
+063
+064  protected final Configuration conf;
+065  protected final String walGroupId;
+066  protected final PriorityBlockingQueue<Path> queue;
+067  protected final ReplicationSourceInterface source;
+068
+069  // Last position in the log that we sent to ZooKeeper
+070  protected long lastLoggedPosition = -1;
+071  // Path of the current log
+072  protected volatile Path currentPath;
+073  // Current state of the worker thread
+074  private WorkerState state;
+075  protected ReplicationSourceWALReaderThread entryReader;
+076
+077  // How long should we sleep for each retry
+078  protected final long sleepForRetries;
+079  // Maximum number of retries before taking bold actions
+080  protected final int maxRetriesMultiplier;
+081
+082  // Use guava cache to set ttl for each key
+083  private final LoadingCache<String, Boolean> canSkipWaitingSet = CacheBuilder.newBuilder()
+084      .expireAfterAccess(1, TimeUnit.DAYS).build(
+085      new CacheLoader<String, Boolean>() {
+086        @Override
+087        public Boolean load(String key) throws Exception {
+088          return false;
+089        }
+090      }
+091  );
+092
+093  public ReplicationSourceShipperThread(Configuration conf, String walGroupId,
+094      PriorityBlockingQueue<Path> queue, ReplicationSourceInterface source) {
+095    this.conf 
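Per its class comment, the new ReplicationSourceShipperThread is the consumer half of a producer/consumer pair: the WAL reader thread enqueues entry batches and this thread polls and ships them until its WorkerState leaves RUNNING. A minimal sketch of that poll-and-ship shape, using plain strings in place of WAL entry batches:

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicReference;

    public class ShipperSketch {
      enum WorkerState { RUNNING, STOPPED, FINISHED }  // mirrors the enum in the new file

      public static void main(String[] args) throws InterruptedException {
        BlockingQueue<String> queue = new LinkedBlockingQueue<>();
        AtomicReference<WorkerState> state = new AtomicReference<>(WorkerState.RUNNING);

        // Consumer half: poll batches off the queue and "ship" them until stopped.
        Thread shipper = new Thread(() -> {
          while (state.get() == WorkerState.RUNNING) {
            try {
              String batch = queue.poll(100, TimeUnit.MILLISECONDS);
              if (batch != null) {
                System.out.println("shipped: " + batch);  // stand-in for the replication RPC
              }
            } catch (InterruptedException e) {
              Thread.currentThread().interrupt();
              return;
            }
          }
        }, "shipper");
        shipper.start();

        // Producer half: in HBase this is ReplicationSourceWALReaderThread.
        queue.put("wal-batch-1");
        queue.put("wal-batch-2");
        Thread.sleep(300);
        state.set(WorkerState.STOPPED);
        shipper.join();
      }
    }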

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/77a552c4/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
index 37b26c9..25f9006 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class MasterRpcServices
+public class MasterRpcServices
 extends RSRpcServices
 implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface,
 org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService.BlockingInterface,
 org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService.BlockingInterface
 Implements the master RPC services.
@@ -741,7 +741,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -750,7 +750,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 
 
 master
-private final HMaster master
+private final HMaster master
 
 
 
@@ -767,7 +767,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 
 
 MasterRpcServices
-public MasterRpcServices(HMaster m)
+public MasterRpcServices(HMaster m)
    throws IOException
 
 Throws:
@@ -789,7 +789,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 
 
 createConfigurationSubset
-private org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder createConfigurationSubset()
+private org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder createConfigurationSubset()
 
 Returns:
 Subset of configuration to pass initializing regionservers: e.g.
@@ -803,7 +803,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 
 
 addConfig
-private org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder addConfig(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder resp,
+private org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder addConfig(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder resp,
   String key)
 
 
@@ -813,7 +813,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 
 
 createPriority
-protected PriorityFunction createPriority()
+protected PriorityFunction createPriority()
 
 Overrides:
 createPriority in class RSRpcServices
@@ -826,7 +826,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 
 
 switchBalancer
-boolean switchBalancer(boolean b,
+boolean switchBalancer(boolean b,
 MasterRpcServices.BalanceSwitchMode mode)
     throws IOException
 Assigns balancer switch according to BalanceSwitchMode
@@ -847,7 +847,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 
 
 synchronousBalanceSwitch
-boolean synchronousBalanceSwitch(boolean b)
+boolean synchronousBalanceSwitch(boolean b)
   throws IOException
 
 Throws:
@@ -861,7 +861,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 
 
 normalizerSwitch
-public boolean normalizerSwitch(boolean on)
+public boolean normalizerSwitch(boolean on)
 Sets normalizer on/off flag in ZK.
 
 
@@ -871,7 +871,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 
 
 getServices
-protected List getServices()
+protected http://docs.oracle.com/javase/8/docs/api/java/u

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b44796ef/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
index 0c0987a..cb93985 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html
@@ -4,7 +4,7 @@
 
 
 
-AsyncRequestFutureImpl (Apache HBase 2.0.0-SNAPSHOT API)
+AsyncRequestFutureImpl (Apache HBase 3.0.0-SNAPSHOT API)
 
 
 
@@ -12,7 +12,7 @@
 
 
-AsyncRpcRetryingCaller (Apache HBase 2.0.0-SNAPSHOT API)
+AsyncRpcRetryingCaller (Apache HBase 3.0.0-SNAPSHOT API)