[09/14] hbase-site git commit: Published site at d957f0fa1926c13355c8cca01bbfd7133866e05d.

2019-01-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4a007343/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html
index 3d7d280..82373f4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html
@@ -269,462 +269,467 @@
 261
 262  @Override
 263  public CompletableFuture<Result> get(Get get) {
-264    CompletableFuture<Result> primaryFuture =
-265      get(get, RegionReplicaUtil.DEFAULT_REPLICA_ID, readRpcTimeoutNs);
-266    if (get.getConsistency() == Consistency.STRONG) {
-267      return primaryFuture;
-268    }
-269    // Timeline consistent read, where we will send requests to other region replicas
-270    CompletableFuture<Result> future = new CompletableFuture<>();
-271    connect(primaryFuture, future);
-272    long primaryCallTimeoutNs = conn.connConf.getPrimaryCallTimeoutNs();
-273    long startNs = System.nanoTime();
-274    addListener(conn.getLocator().getRegionLocations(tableName, get.getRow(),
-275      RegionLocateType.CURRENT, false, readRpcTimeoutNs), (locs, error) -> {
-276        if (error != null) {
-277          LOG.warn(
-278            "Failed to locate all the replicas for table={}, row='{}'," +
-279              " give up timeline consistent read",
-280            tableName, Bytes.toStringBinary(get.getRow()), error);
-281          return;
-282        }
-283        if (locs.size() <= 1) {
-284          LOG.warn(
-285            "There are no secondary replicas for region {}," + " give up timeline consistent read",
-286            locs.getDefaultRegionLocation().getRegion());
-287          return;
-288        }
-289        long delayNs = primaryCallTimeoutNs - (System.nanoTime() - startNs);
-290        if (delayNs <= 0) {
-291          timelineConsistentGet(get, locs, future);
-292        } else {
-293          AsyncConnectionImpl.RETRY_TIMER.newTimeout(
-294            timeout -> timelineConsistentGet(get, locs, future), delayNs, TimeUnit.NANOSECONDS);
-295        }
-296      });
-297    return future;
-298  }
-299
-300  @Override
-301  public CompletableFuture<Void> put(Put put) {
-302    return this.<Void> newCaller(put, writeRpcTimeoutNs)
-303      .action((controller, loc, stub) -> RawAsyncTableImpl.<Put> voidMutate(controller, loc, stub,
-304        put, RequestConverter::buildMutateRequest))
-305      .call();
-306  }
-307
-308  @Override
-309  public CompletableFuture<Void> delete(Delete delete) {
-310    return this.<Void> newCaller(delete, writeRpcTimeoutNs)
-311      .action((controller, loc, stub) -> RawAsyncTableImpl.<Delete> voidMutate(controller, loc,
-312        stub, delete, RequestConverter::buildMutateRequest))
-313      .call();
-314  }
-315
-316  @Override
-317  public CompletableFuture<Result> append(Append append) {
-318    checkHasFamilies(append);
-319    return this.<Result> newCaller(append, rpcTimeoutNs)
-320      .action((controller, loc, stub) -> this.<Append, Result> noncedMutate(controller, loc, stub,
-321        append, RequestConverter::buildMutateRequest, RawAsyncTableImpl::toResult))
-322      .call();
-323  }
-324
-325  @Override
-326  public CompletableFuture<Result> increment(Increment increment) {
-327    checkHasFamilies(increment);
-328    return this.<Result> newCaller(increment, rpcTimeoutNs)
-329      .action((controller, loc, stub) -> this.<Increment, Result> noncedMutate(controller, loc,
-330        stub, increment, RequestConverter::buildMutateRequest, RawAsyncTableImpl::toResult))
-331      .call();
-332  }
-333
-334  private final class CheckAndMutateBuilderImpl implements CheckAndMutateBuilder {
-335
-336    private final byte[] row;
-337
-338    private final byte[] family;
-339
-340    private byte[] qualifier;
-341
-342    private TimeRange timeRange;
-343
-344    private CompareOperator op;
-345
-346    private byte[] value;
-347
-348    public CheckAndMutateBuilderImpl(byte[] row, byte[] family) {
-349      this.row = Preconditions.checkNotNull(row, "row is null");
-350      this.family = Preconditions.checkNotNull(family, "family is null");
-351    }
+264    if (get.getConsistency() == Consistency.STRONG) {
+265      return get(get, RegionReplicaUtil.DEFAULT_REPLICA_ID, readRpcTimeoutNs);
+266    }
+267    // user specifies a replica id explicitly, just send request to the specific replica
+268    if (get.getReplicaId() >= 0) {
+269      return get(get, get.getReplicaId(), readRpcTimeoutNs);
+270    }
+271
+272    // Timeline consistent read, where we may send requests to
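
The change above reworks RawAsyncTableImpl.get(Get): a STRONG read goes straight to the primary replica, a Get with an explicit replica id goes only to that replica, and only a TIMELINE read with no pinned replica falls back to secondary replicas once the primary call timeout expires. A minimal sketch of that behaviour from the public HBase 2.x async client API, not the internal RawAsyncTableImpl code; the table, family and qualifier names are made up:

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.AsyncTable;
import org.apache.hadoop.hbase.client.Consistency;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class TimelineGetExample {
  public static void main(String[] args) throws Exception {
    try (AsyncConnection conn = ConnectionFactory.createAsyncConnection().get()) {
      AsyncTable<?> table = conn.getTable(TableName.valueOf("t1"));
      Get get = new Get(Bytes.toBytes("row1"));
      // TIMELINE consistency allows the client to fall back to secondary region
      // replicas when the primary does not answer within the primary call timeout.
      // get.setReplicaId(1) would instead pin the read to one specific replica.
      get.setConsistency(Consistency.TIMELINE);
      CompletableFuture<Result> future = table.get(get);
      Result result = future.get();
      // isStale() is true when the value came from a secondary replica.
      System.out.println("stale=" + result.isStale() + ", result=" + result);
    }
  }
}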

[09/14] hbase-site git commit: Published site at b2bf22e209d2e87121986b35c5749b2b8ae45fa2.

2018-12-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f19aea37/replication.html
--
diff --git a/replication.html b/replication.html
index c053f02..dbca3b0 100644
--- a/replication.html
+++ b/replication.html
@@ -7,7 +7,7 @@
 
 
 
-
+
 
 Apache HBase – Apache HBase (TM) Replication
@@ -313,7 +313,7 @@ under the License. -->
 https://www.apache.org/">The Apache Software Foundation.
 All rights reserved.
 
-      Last Published: 2018-12-21
+      Last Published: 2018-12-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f19aea37/resources.html
--
diff --git a/resources.html b/resources.html
index b8e9cd5..e454e03 100644
--- a/resources.html
+++ b/resources.html
@@ -7,7 +7,7 @@
 
 
 
-
+
 
 Apache HBase – Other Apache HBase (TM) Resources
 
@@ -341,7 +341,7 @@ under the License. -->
 https://www.apache.org/">The Apache Software Foundation.
 All rights reserved.
 
-      Last Published: 2018-12-21
+      Last Published: 2018-12-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f19aea37/source-repository.html
--
diff --git a/source-repository.html b/source-repository.html
index bb78aca..d63a1b6 100644
--- a/source-repository.html
+++ b/source-repository.html
@@ -7,7 +7,7 @@
 
 
 
-
+
 
 Apache HBase – Source Code Management
 
@@ -309,7 +309,7 @@
 https://www.apache.org/">The Apache Software Foundation.
 All rights reserved.
 
-      Last Published: 2018-12-21
+      Last Published: 2018-12-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f19aea37/sponsors.html
--
diff --git a/sponsors.html b/sponsors.html
index 9a42dbc..3eb4f72 100644
--- a/sponsors.html
+++ b/sponsors.html
@@ -7,7 +7,7 @@
 
 
 
-
+
 
 Apache HBase – Apache HBase™ Sponsors
 
@@ -343,7 +343,7 @@ under the License. -->
 https://www.apache.org/">The Apache Software Foundation.
 All rights reserved.
 
-      Last Published: 2018-12-21
+      Last Published: 2018-12-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f19aea37/supportingprojects.html
--
diff --git a/supportingprojects.html b/supportingprojects.html
index 0435d9c..289c025 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -7,7 +7,7 @@
 
 
 
-
+
 
 Apache HBase – Supporting Projects
 
@@ -530,7 +530,7 @@ under the License. -->
 https://www.apache.org/">The Apache Software Foundation.
 All rights reserved.
 
-      Last Published: 2018-12-21
+      Last Published: 2018-12-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f19aea37/team-list.html
--
diff --git a/team-list.html b/team-list.html
index ed2d4e6..39c1f29 100644
--- a/team-list.html
+++ b/team-list.html
@@ -7,7 +7,7 @@
 
 
 
-
+
 
 Apache HBase – Project Team
 
@@ -776,7 +776,7 @@
 https://www.apache.org/">The Apache Software Foundation.
 All rights reserved.
 
-      Last Published: 2018-12-21
+      Last Published: 2018-12-22
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f19aea37/testdevapidocs/index-all.html
--
diff --git a/testdevapidocs/index-all.html b/testdevapidocs/index-all.html
index 0c5a341..b8ca33a 100644
--- a/testdevapidocs/index-all.html
+++ b/testdevapidocs/index-all.html
@@ -48925,6 +48925,10 @@
  
 testColumnPrefixFilter() - Method in class org.apache.hadoop.hbase.filter.TestParseFilter
 
+testColumnPrefixFilterConcatWithOR() - Method in class org.apache.hadoop.hbase.filter.TestFilterListOnMini
+
+Test case for HBASE-21620
+
 testColumnPrefixFilterWithFilterList() - Method in class org.apache.hadoop.hbase.filter.TestColumnPrefixFilter
 
 TestColumnRangeFilter - Class in org.apache.hadoop.hbase.filter
@@ -52033,6 +52037,8 @@
 When we do a "MUST_PASS_ONE" (a 
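
The new testColumnPrefixFilterConcatWithOR entry covers HBASE-21620, where ColumnPrefixFilters are concatenated with OR inside a MUST_PASS_ONE FilterList. A short sketch of that filter setup with the standard HBase client Filter API; the family and prefixes here are only illustrative:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.util.Bytes;

public class PrefixOrScanExample {
  // Scan that keeps columns whose qualifier starts with either of two prefixes.
  public static Scan prefixOrScan(byte[] family, String prefixA, String prefixB) {
    FilterList orList = new FilterList(FilterList.Operator.MUST_PASS_ONE,
        new ColumnPrefixFilter(Bytes.toBytes(prefixA)),
        new ColumnPrefixFilter(Bytes.toBytes(prefixB)));
    Scan scan = new Scan();
    scan.addFamily(family);
    scan.setFilter(orList);
    return scan;
  }
}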

[09/14] hbase-site git commit: Published site at 7cdb52519236966a7cb6dff7fbd0609c87545f75.

2018-10-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5150e577/devapidocs/src-html/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.HFileDeleteTask.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.HFileDeleteTask.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.HFileDeleteTask.html
index 31d4f2f..547fb48 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.HFileDeleteTask.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.HFileDeleteTask.html
@@ -40,471 +40,480 @@
 032import org.apache.hadoop.fs.Path;
 033import org.apache.hadoop.hbase.Stoppable;
 034import org.apache.hadoop.hbase.io.HFileLink;
-035import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-036import org.apache.hadoop.hbase.util.StealJobQueue;
-037import org.apache.yetus.audience.InterfaceAudience;
-038import org.slf4j.Logger;
-039import org.slf4j.LoggerFactory;
-040import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-041/**
-042 * This Chore, every time it runs, will clear the HFiles in the hfile archive
-043 * folder that are deletable for each HFile cleaner in the chain.
-044 */
-045@InterfaceAudience.Private
-046public class HFileCleaner extends CleanerChore<BaseHFileCleanerDelegate> {
-047
-048  public static final String MASTER_HFILE_CLEANER_PLUGINS = "hbase.master.hfilecleaner.plugins";
-049
-050  public HFileCleaner(final int period, final Stoppable stopper, Configuration conf, FileSystem fs,
-051      Path directory) {
-052    this(period, stopper, conf, fs, directory, null);
-053  }
-054
-055  // Configuration key for large/small throttle point
-056  public final static String HFILE_DELETE_THROTTLE_THRESHOLD =
-057      "hbase.regionserver.thread.hfilecleaner.throttle";
-058  public final static int DEFAULT_HFILE_DELETE_THROTTLE_THRESHOLD = 64 * 1024 * 1024;// 64M
-059
-060  // Configuration key for large queue initial size
-061  public final static String LARGE_HFILE_QUEUE_INIT_SIZE =
-062      "hbase.regionserver.hfilecleaner.large.queue.size";
-063  public final static int DEFAULT_LARGE_HFILE_QUEUE_INIT_SIZE = 10240;
-064
-065  // Configuration key for small queue initial size
-066  public final static String SMALL_HFILE_QUEUE_INIT_SIZE =
-067      "hbase.regionserver.hfilecleaner.small.queue.size";
-068  public final static int DEFAULT_SMALL_HFILE_QUEUE_INIT_SIZE = 10240;
-069
-070  // Configuration key for large file delete thread number
-071  public final static String LARGE_HFILE_DELETE_THREAD_NUMBER =
-072      "hbase.regionserver.hfilecleaner.large.thread.count";
-073  public final static int DEFAULT_LARGE_HFILE_DELETE_THREAD_NUMBER = 1;
-074
-075  // Configuration key for small file delete thread number
-076  public final static String SMALL_HFILE_DELETE_THREAD_NUMBER =
-077      "hbase.regionserver.hfilecleaner.small.thread.count";
-078  public final static int DEFAULT_SMALL_HFILE_DELETE_THREAD_NUMBER = 1;
-079
-080  public static final String HFILE_DELETE_THREAD_TIMEOUT_MSEC =
-081      "hbase.regionserver.hfilecleaner.thread.timeout.msec";
-082  @VisibleForTesting
-083  static final long DEFAULT_HFILE_DELETE_THREAD_TIMEOUT_MSEC = 60 * 1000L;
-084
-085  public static final String HFILE_DELETE_THREAD_CHECK_INTERVAL_MSEC =
-086      "hbase.regionserver.hfilecleaner.thread.check.interval.msec";
-087  @VisibleForTesting
-088  static final long DEFAULT_HFILE_DELETE_THREAD_CHECK_INTERVAL_MSEC = 1000L;
-089
-090  private static final Logger LOG = LoggerFactory.getLogger(HFileCleaner.class);
-091
-092  StealJobQueue<HFileDeleteTask> largeFileQueue;
-093  BlockingQueue<HFileDeleteTask> smallFileQueue;
-094  private int throttlePoint;
-095  private int largeQueueInitSize;
-096  private int smallQueueInitSize;
-097  private int largeFileDeleteThreadNumber;
-098  private int smallFileDeleteThreadNumber;
-099  private long cleanerThreadTimeoutMsec;
-100  private long cleanerThreadCheckIntervalMsec;
-101  private List<Thread> threads = new ArrayList<Thread>();
-102  private boolean running;
-103
-104  private AtomicLong deletedLargeFiles = new AtomicLong();
-105  private AtomicLong deletedSmallFiles = new AtomicLong();
+035import org.apache.hadoop.hbase.master.HMaster;
+036import org.apache.hadoop.hbase.master.MasterServices;
+037import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
+038import org.apache.hadoop.hbase.util.StealJobQueue;
+039import org.apache.yetus.audience.InterfaceAudience;
+040import org.slf4j.Logger;
+041import org.slf4j.LoggerFactory;
+042import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+043/**
+044 * This Chore, every time it runs, will clear the HFiles in the hfile archive
+045 * folder that are deletable for each HFile cleaner in the chain.
+046 */
+047@InterfaceAudience.
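
The constants in this diff are the HFileCleaner's tuning knobs: archived HFiles are split by size between a large-file and a small-file delete queue, each drained by its own threads, with the throttle threshold deciding which queue a file lands in. A small sketch of setting those keys programmatically on a Configuration (the same keys can go in hbase-site.xml); the values are examples only, not recommendations:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class HFileCleanerTuning {
  public static Configuration tuned() {
    Configuration conf = HBaseConfiguration.create();
    // Size threshold (bytes) that splits deletions between the large- and small-file queues.
    conf.setInt("hbase.regionserver.thread.hfilecleaner.throttle", 64 * 1024 * 1024);
    // Initial capacity of the two delete queues.
    conf.setInt("hbase.regionserver.hfilecleaner.large.queue.size", 10240);
    conf.setInt("hbase.regionserver.hfilecleaner.small.queue.size", 10240);
    // Number of delete threads serving each queue.
    conf.setInt("hbase.regionserver.hfilecleaner.large.thread.count", 2);
    conf.setInt("hbase.regionserver.hfilecleaner.small.thread.count", 2);
    return conf;
  }
}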

[09/14] hbase-site git commit: Published site at .

2018-02-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/fb491705/testapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html
--
diff --git a/testapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html b/testapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html
index 5a1361f..8a84538 100644
--- a/testapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html
+++ b/testapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html
@@ -354,3779 +354,3782 @@
 346  }
 347}
 348
-349    // Every cluster is a local cluster until we start DFS
-350    // Note that conf could be null, but this.conf will not be
-351    String dataTestDir = getDataTestDir().toString();
-352    this.conf.set("fs.defaultFS","file:///");
-353    this.conf.set(HConstants.HBASE_DIR, "file://" + dataTestDir);
-354    LOG.debug("Setting {} to {}", HConstants.HBASE_DIR, dataTestDir);
-355    this.conf.setBoolean(CommonFSUtils.UNSAFE_STREAM_CAPABILITY_ENFORCE,false);
-356  }
-357
-358  /**
-359   * @deprecated use {@link HBaseTestingUtility#HBaseTestingUtility()} instead
-360   * @return a normal HBaseTestingUtility
-361   */
-362  @Deprecated
-363  public static HBaseTestingUtility createLocalHTU() {
-364    return new HBaseTestingUtility();
-365  }
-366
-367  /**
-368   * @deprecated use {@link HBaseTestingUtility#HBaseTestingUtility(Configuration)} instead
-369   * @return a normal HBaseTestingUtility
-370   */
-371  @Deprecated
-372  public static HBaseTestingUtility createLocalHTU(Configuration c) {
-373    return new HBaseTestingUtility(c);
-374  }
-375
-376  /**
-377   * Close both the region {@code r} and it's underlying WAL. For use in tests.
-378   */
-379  public static void closeRegionAndWAL(final Region r) throws IOException {
-380    closeRegionAndWAL((HRegion)r);
-381  }
-382
-383  /**
-384   * Close both the HRegion {@code r} and it's underlying WAL. For use in tests.
-385   */
-386  public static void closeRegionAndWAL(final HRegion r) throws IOException {
-387    if (r == null) return;
-388    r.close();
-389    if (r.getWAL() == null) return;
-390    r.getWAL().close();
-391  }
-392
-393  /**
-394   * Returns this classes's instance of {@link Configuration}.  Be careful how
-395   * you use the returned Configuration since {@link Connection} instances
-396   * can be shared.  The Map of Connections is keyed by the Configuration.  If
-397   * say, a Connection was being used against a cluster that had been shutdown,
-398   * see {@link #shutdownMiniCluster()}, then the Connection will no longer
-399   * be wholesome.  Rather than use the return direct, its usually best to
-400   * make a copy and use that.  Do
-401   * Configuration c = new Configuration(INSTANCE.getConfiguration());
-402   * @return Instance of Configuration.
-403   */
-404  @Override
-405  public Configuration getConfiguration() {
-406    return super.getConfiguration();
-407  }
-408
-409  public void setHBaseCluster(HBaseCluster hbaseCluster) {
-410    this.hbaseCluster = hbaseCluster;
-411  }
-412
-413  /**
-414   * Home our data in a dir under {@link #DEFAULT_BASE_TEST_DIRECTORY}.
-415   * Give it a random name so can have many concurrent tests running if
-416   * we need to.  It needs to amend the {@link #TEST_DIRECTORY_KEY}
-417   * System property, as it's what minidfscluster bases
-418   * it data dir on.  Moding a System property is not the way to do concurrent
-419   * instances -- another instance could grab the temporary
-420   * value unintentionally -- but not anything can do about it at moment;
-421   * single instance only is how the minidfscluster works.
-422   *
-423   * We also create the underlying directory for
-424   *  hadoop.log.dir, mapreduce.cluster.local.dir and hadoop.tmp.dir, and set the values
-425   *  in the conf, and as a system property for hadoop.tmp.dir
-426   *
-427   * @return The calculated data test build directory, if newly-created.
-428   */
-429  @Override
-430  protected Path setupDataTestDir() {
-431    Path testPath = super.setupDataTestDir();
-432    if (null == testPath) {
-433      return null;
-434    }
-435
-436    createSubDirAndSystemProperty(
-437      "hadoop.log.dir",
-438      testPath, "hadoop-log-dir");
-439
-440    // This is defaulted in core-default.xml to /tmp/hadoop-${user.name}, but
-441    //  we want our own value to ensure uniqueness on the same machine
-442    createSubDirAndSystemProperty(
-443      "hadoop.tmp.dir",
-444      testPath, "hadoop-tmp-dir");
-445
-446    // Read and modified in org.apache.hadoop.mapred.MiniMRCluster
-447    createSubDir(
-448      "mapreduce.cluster.local.dir",
-449      testPath, "mapred-local-dir");
-450
-451    return testPath;
-452  }
+349    // Save this for when setting default file:// breaks things
+350    this.conf.set("original.defaultFS", this.conf.get("fs.defaultFS"));
+351
+
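
The getConfiguration() javadoc above recommends copying the returned Configuration rather than mutating the shared instance. A brief sketch of that pattern around a mini cluster, assuming the HBaseTestingUtility test API shown in this diff:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;

public class MiniClusterExample {
  public static void main(String[] args) throws Exception {
    HBaseTestingUtility util = new HBaseTestingUtility();
    util.startMiniCluster();
    try {
      // Copy the utility's Configuration instead of mutating the shared instance,
      // as the getConfiguration() javadoc advises.
      Configuration conf = new Configuration(util.getConfiguration());
      conf.setInt("hbase.client.retries.number", 3);
      // ... create a Connection from the copied conf and run the test ...
    } finally {
      util.shutdownMiniCluster();
    }
  }
}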

[09/14] hbase-site git commit: Published site at .

2017-12-04 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/29385b7e/testdevapidocs/index-all.html
--
diff --git a/testdevapidocs/index-all.html b/testdevapidocs/index-all.html
index e1028db..e701665 100644
--- a/testdevapidocs/index-all.html
+++ b/testdevapidocs/index-all.html
@@ -14238,6 +14238,8 @@
  
 getOrderingFilter() - Method in class org.apache.hadoop.hbase.filter.TestFilterList
 
+getOtherRegionServer(HRegionServer) - Method in class org.apache.hadoop.hbase.HBaseTestingUtility
+ 
 getOtherRegionServer(MiniHBaseCluster, HRegionServer) - Method in class org.apache.hadoop.hbase.regionserver.TestSplitTransactionOnCluster
 
 Find regionserver other than the one passed.
@@ -20225,6 +20227,8 @@
 
 LOG - Static variable in class org.apache.hadoop.hbase.regionserver.TestCompactionFileNotFound
 
+LOG - Static variable in class org.apache.hadoop.hbase.regionserver.TestCompactionInDeadRegionServer
+ 
 LOG - Static variable in class org.apache.hadoop.hbase.regionserver.TestCompactionPolicy
 
 LOG - Static variable in class org.apache.hadoop.hbase.regionserver.TestCompactionState

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/29385b7e/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.AvailablePortChecker.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.AvailablePortChecker.html b/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.AvailablePortChecker.html
index a36e149..b5a3c98 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.AvailablePortChecker.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.AvailablePortChecker.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static interface HBaseTestingUtility.PortAllocator.AvailablePortChecker
+static interface HBaseTestingUtility.PortAllocator.AvailablePortChecker
 
 
 
@@ -149,7 +149,7 @@ var activeTableTab = "activeTableTab";
 
 
 available
-boolean available(int port)
+boolean available(int port)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/29385b7e/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html b/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html
index 26eefa0..e7d0389 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/HBaseTestingUtility.PortAllocator.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class HBaseTestingUtility.PortAllocator
+static class HBaseTestingUtility.PortAllocator
 extends java.lang.Object
 
 
@@ -250,7 +250,7 @@ extends java.lang.Object
 
 
 MIN_RANDOM_PORT
-private static final int MIN_RANDOM_PORT
+private static final int MIN_RANDOM_PORT
 
 See Also:
Constant Field Values
@@ -263,7 +263,7 @@ extends java.lang.Object
 
 
 MAX_RANDOM_PORT
-private static final int MAX_RANDOM_PORT
+private static final int MAX_RANDOM_PORT
 
 See Also:
Constant Field Values
@@ -276,7 +276,7 @@ extends java.lang.Object
 
 
 takenRandomPorts
-private final Set<Integer> takenRandomPorts
+private final Set<Integer> takenRandomPorts
 A set of ports that have been claimed using randomFreePort().
 
 
@@ -286,7 +286,7 @@ extends java.lang.Object
 
 
 random
-private final Random random
+private final Random random
 
 
 
@@ -295,7 +295,7 @@ extends java.lang.Object
 
 
 portChecker
-private final HBaseTestingUtility.PortAllocator.AvailablePortChecker portChecker
+private final HBaseTestingUtility.PortAllocator.AvailableP
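
The PortAllocator documented above keeps a Random bounded by MIN_RANDOM_PORT and MAX_RANDOM_PORT, a Set of ports already handed out, and an AvailablePortChecker callback. A rough stand-alone sketch of the same pattern, not the HBase class itself; the bind-based availability check and the port range are assumptions:

import java.io.IOException;
import java.net.ServerSocket;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;

public class SimplePortAllocator {
  // Assumed ephemeral-style range; the real bounds live in the HBase class.
  private static final int MIN_RANDOM_PORT = 0xc000;
  private static final int MAX_RANDOM_PORT = 0xfffe;

  private final Set<Integer> takenRandomPorts = new HashSet<>();
  private final Random random = new Random();

  /** Pick a random port that has not been claimed yet and that we can actually bind. */
  public synchronized int randomFreePort() {
    while (true) {
      int port = MIN_RANDOM_PORT + random.nextInt(MAX_RANDOM_PORT - MIN_RANDOM_PORT);
      if (!takenRandomPorts.add(port)) {
        continue; // already handed out in this JVM
      }
      if (available(port)) {
        return port;
      }
    }
  }

  // Availability check in the spirit of AvailablePortChecker#available(int).
  private static boolean available(int port) {
    try (ServerSocket ss = new ServerSocket(port)) {
      return true;
    } catch (IOException e) {
      return false;
    }
  }
}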

[09/14] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-02 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f63def63/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
index 23fce63..36f2731 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
@@ -34,9 +34,9 @@
 026    @org.jamon.annotations.Argument(name = "regionServer", type = "HRegionServer")},
 027  optionalArguments = {
 028    @org.jamon.annotations.Argument(name = "bcv", type = "String"),
-029    @org.jamon.annotations.Argument(name = "filter", type = "String"),
-030    @org.jamon.annotations.Argument(name = "bcn", type = "String"),
-031    @org.jamon.annotations.Argument(name = "format", type = "String")})
+029    @org.jamon.annotations.Argument(name = "bcn", type = "String"),
+030    @org.jamon.annotations.Argument(name = "format", type = "String"),
+031    @org.jamon.annotations.Argument(name = "filter", type = "String")})
 032public class RSStatusTmpl
 033  extends org.jamon.AbstractTemplateProxy
 034{
@@ -94,57 +94,57 @@
 086      return m_bcv__IsNotDefault;
 087    }
 088    private boolean m_bcv__IsNotDefault;
-089    // 21, 1
-090    public void setFilter(String filter)
+089    // 23, 1
+090    public void setBcn(String bcn)
 091    {
-092      // 21, 1
-093      m_filter = filter;
-094      m_filter__IsNotDefault = true;
+092      // 23, 1
+093      m_bcn = bcn;
+094      m_bcn__IsNotDefault = true;
 095    }
-096    public String getFilter()
+096    public String getBcn()
 097    {
-098      return m_filter;
+098      return m_bcn;
 099    }
-100    private String m_filter;
-101    public boolean getFilter__IsNotDefault()
+100    private String m_bcn;
+101    public boolean getBcn__IsNotDefault()
 102    {
-103      return m_filter__IsNotDefault;
+103      return m_bcn__IsNotDefault;
 104    }
-105    private boolean m_filter__IsNotDefault;
-106    // 23, 1
-107    public void setBcn(String bcn)
+105    private boolean m_bcn__IsNotDefault;
+106    // 22, 1
+107    public void setFormat(String format)
 108    {
-109      // 23, 1
-110      m_bcn = bcn;
-111      m_bcn__IsNotDefault = true;
+109      // 22, 1
+110      m_format = format;
+111      m_format__IsNotDefault = true;
 112    }
-113    public String getBcn()
+113    public String getFormat()
 114    {
-115      return m_bcn;
+115      return m_format;
 116    }
-117    private String m_bcn;
-118    public boolean getBcn__IsNotDefault()
+117    private String m_format;
+118    public boolean getFormat__IsNotDefault()
 119    {
-120      return m_bcn__IsNotDefault;
+120      return m_format__IsNotDefault;
 121    }
-122    private boolean m_bcn__IsNotDefault;
-123    // 22, 1
-124    public void setFormat(String format)
+122    private boolean m_format__IsNotDefault;
+123    // 21, 1
+124    public void setFilter(String filter)
 125    {
-126      // 22, 1
-127      m_format = format;
-128      m_format__IsNotDefault = true;
+126      // 21, 1
+127      m_filter = filter;
+128      m_filter__IsNotDefault = true;
 129    }
-130    public String getFormat()
+130    public String getFilter()
 131    {
-132      return m_format;
+132      return m_filter;
 133    }
-134    private String m_format;
-135    public boolean getFormat__IsNotDefault()
+134    private String m_filter;
+135    public boolean getFilter__IsNotDefault()
 136    {
-137      return m_format__IsNotDefault;
+137      return m_filter__IsNotDefault;
 138    }
-139    private boolean m_format__IsNotDefault;
+139    private boolean m_filter__IsNotDefault;
 140  }
 141  @Override
 142  protected org.jamon.AbstractTemplateProxy.ImplData makeImplData()
@@ -163,24 +163,24 @@
 155    return this;
 156  }
 157  
-158  protected String filter;
-159  public final org.apache.hadoop.hbase.tmpl.regionserver.RSStatusTmpl setFilter(String p_filter)
+158  protected String bcn;
+159  public final org.apache.hadoop.hbase.tmpl.regionserver.RSStatusTmpl setBcn(String p_bcn)
 160  {
-161    (getImplData()).setFilter(p_filter);
+161    (getImplData()).setBcn(p_bcn);
 162    return this;
 163  }
 164  
-165  protected String bcn;
-166  public final org.apache.hadoop.hbase.tmpl.regionserver.RSStatusTmpl setBcn(String p_bcn)
+165  protected String format;
+166  public final org.apache.hadoop.hbase.tmpl.regionserver.RSStatusTmpl setFormat(String p_format)
 167  {
-168    (getImplData()).setBcn(p_bcn);
+168    (getImplData()).setFormat(p_format);
 169    return this;
 170  }
 171  
-172  protected String format;
-173  public final org.apache.hadoop.hbase.tmpl.regionserver.RSStatusTmpl setFormat(String p_format)
+172  pr
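
The diff above is Jamon-generated proxy code for RSStatusTmpl: the optional template arguments (filter, bcn, bcv, format) become chainable setters on the proxy, and the required regionServer argument is supplied at render time. A hedged sketch of how such a proxy is typically driven; the render(Writer, HRegionServer) signature is an assumption based on the required argument, not taken from this diff:

import java.io.IOException;
import java.io.PrintWriter;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.tmpl.regionserver.RSStatusTmpl;

public class RSStatusRenderExample {
  // Render the region server status page, setting only the optional arguments we need.
  static void render(PrintWriter out, HRegionServer rs) throws IOException {
    new RSStatusTmpl()
        .setFilter("general")   // optional argument "filter"
        .setFormat("html")      // optional argument "format"
        .render(out, rs);       // "regionServer" is the one required argument
  }
}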