[03/30] hbase-site git commit: Published site at 931156f66b1decc19d89f8bb3ce9e5f355fb4fb2.

2018-10-22 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6b8b907f/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html
index 8d98ec3..f2d5735 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/HBaseTestingUtility.html
@@ -179,4148 +179,4147 @@
 171 * avoiding port contention if another local HBase instance is already running).
 172 * <p>To preserve test data directories, pass the system property "hbase.testing.preserve.testdir"
 173 * setting it to true.
-174 * Trigger pre commit.
-175 */
-176@InterfaceAudience.Public
-177@SuppressWarnings("deprecation")
-178public class HBaseTestingUtility extends HBaseZKTestingUtility {
-179
-180  /**
-181   * System property key to get test directory value. Name is as it is because mini dfs has
-182   * hard-codings to put test data here. It should NOT be used directly in HBase, as it's a property
-183   * used in mini dfs.
-184   * @deprecated can be used only with mini dfs
-185   */
-186  @Deprecated
-187  private static final String TEST_DIRECTORY_KEY = "test.build.data";
-188
-189  public static final String REGIONS_PER_SERVER_KEY = "hbase.test.regions-per-server";
-190  /**
-191   * The default number of regions per regionserver when creating a pre-split
-192   * table.
-193   */
-194  public static final int DEFAULT_REGIONS_PER_SERVER = 3;
+174 */
+175@InterfaceAudience.Public
+176@SuppressWarnings("deprecation")
+177public class HBaseTestingUtility extends HBaseZKTestingUtility {
+178
+179  /**
+180   * System property key to get test directory value. Name is as it is because mini dfs has
+181   * hard-codings to put test data here. It should NOT be used directly in HBase, as it's a property
+182   * used in mini dfs.
+183   * @deprecated can be used only with mini dfs
+184   */
+185  @Deprecated
+186  private static final String TEST_DIRECTORY_KEY = "test.build.data";
+187
+188  public static final String REGIONS_PER_SERVER_KEY = "hbase.test.regions-per-server";
+189  /**
+190   * The default number of regions per regionserver when creating a pre-split
+191   * table.
+192   */
+193  public static final int DEFAULT_REGIONS_PER_SERVER = 3;
+194
 195
-196
-197  public static final String PRESPLIT_TEST_TABLE_KEY = "hbase.test.pre-split-table";
-198  public static final boolean PRESPLIT_TEST_TABLE = true;
-199
-200  private MiniDFSCluster dfsCluster = null;
-201
-202  private volatile HBaseCluster hbaseCluster = null;
-203  private MiniMRCluster mrCluster = null;
-204
-205  /** If there is a mini cluster running for this testing utility instance. */
-206  private volatile boolean miniClusterRunning;
-207
-208  private String hadoopLogDir;
-209
-210  /** Directory on test filesystem where we put the data for this instance of
-211   * HBaseTestingUtility*/
-212  private Path dataTestDirOnTestFS = null;
-213
-214  /**
-215   * Shared cluster connection.
-216   */
-217  private volatile Connection connection;
-218
-219  /** Filesystem URI used for map-reduce mini-cluster setup */
-220  private static String FS_URI;
-221
-222  /** This is for unit tests parameterized with a single boolean. */
-223  public static final List<Object[]> MEMSTORETS_TAGS_PARAMETRIZED = memStoreTSAndTagsCombination();
-224
-225  /**
-226   * Checks to see if a specific port is available.
-227   *
-228   * @param port the port number to check for availability
-229   * @return <tt>true</tt> if the port is available, or <tt>false</tt> if not
-230   */
-231  public static boolean available(int port) {
-232    ServerSocket ss = null;
-233    DatagramSocket ds = null;
-234    try {
-235      ss = new ServerSocket(port);
-236      ss.setReuseAddress(true);
-237      ds = new DatagramSocket(port);
-238      ds.setReuseAddress(true);
-239      return true;
-240    } catch (IOException e) {
-241      // Do nothing
-242    } finally {
-243      if (ds != null) {
-244        ds.close();
-245      }
-246
-247      if (ss != null) {
-248        try {
-249          ss.close();
-250        } catch (IOException e) {
-251          /* should not be thrown */
-252        }
-253      }
-254    }
-255
-256    return false;
-257  }
-258
-259  /**
-260   * Create all combinations of Bloom filters and compression algorithms for
-261   * testing.
-262   */
-263  private static List<Object[]> bloomAndCompressionCombinations() {
-264    List<Object[]> configurations = new ArrayList<>();
-265    for (Compression.Algorithm comprAlgo :
-266        HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {
-267      for (BloomType bloomType : BloomType.values()) {
-268        configurations.add(new Object[] { comprAlgo, bloomType });
-269      }
-270    }
-271    return
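
The available(int port) helper shown in the hunk above probes a port by binding both a TCP ServerSocket and a UDP DatagramSocket, treating any IOException as "port taken". A minimal standalone sketch of the same technique follows (class name and sample port are illustrative, not from the diff); note that any such check is inherently racy, since another process can grab the port the moment the sockets close.

import java.io.IOException;
import java.net.DatagramSocket;
import java.net.ServerSocket;

public class PortProbe {
  /** True only if both a TCP and a UDP socket can bind the port right now. */
  public static boolean available(int port) {
    try (ServerSocket ss = new ServerSocket(port);
         DatagramSocket ds = new DatagramSocket(port)) {
      return true;
    } catch (IOException e) {
      return false; // already bound, or we lack permission
    }
  }

  public static void main(String[] args) {
    System.out.println("port 16010 available? " + available(16010));
  }
}

Try-with-resources replaces the explicit finally-block cleanup in the original while keeping the same TCP-plus-UDP probe.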

[03/30] hbase-site git commit: Published site at a8e184dc77470bdf9d62e19c5d36bc1de7cf4c6d.

2018-07-31 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5fd895c6/devapidocs/src-html/org/apache/hadoop/hbase/security/User.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/security/User.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/User.html
index ec5ba00..997f52f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/security/User.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/security/User.html
@@ -35,357 +35,404 @@
 027import java.util.HashMap;
 028import java.util.List;
 029import java.util.Map;
-030import java.util.concurrent.ExecutionException;
-031
-032import org.apache.hadoop.conf.Configuration;
-033import org.apache.hadoop.hbase.util.Methods;
-034import org.apache.hadoop.security.Groups;
-035import org.apache.hadoop.security.SecurityUtil;
-036import org.apache.hadoop.security.UserGroupInformation;
-037import org.apache.hadoop.security.token.Token;
-038import org.apache.hadoop.security.token.TokenIdentifier;
-039import org.apache.yetus.audience.InterfaceAudience;
-040
-041import org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache;
+030import java.util.Optional;
+031import java.util.concurrent.ExecutionException;
+032
+033import org.apache.hadoop.conf.Configuration;
+034import org.apache.hadoop.hbase.AuthUtil;
+035import org.apache.hadoop.hbase.util.Methods;
+036import org.apache.hadoop.security.Groups;
+037import org.apache.hadoop.security.SecurityUtil;
+038import org.apache.hadoop.security.UserGroupInformation;
+039import org.apache.hadoop.security.token.Token;
+040import org.apache.hadoop.security.token.TokenIdentifier;
+041import org.apache.yetus.audience.InterfaceAudience;
 042
-043/**
-044 * Wrapper to abstract out usage of user and group information in HBase.
-045 *
-046 * <p>
-047 * This class provides a common interface for interacting with user and group
-048 * information across changing APIs in different versions of Hadoop.  It only
-049 * provides access to the common set of functionality in
-050 * {@link org.apache.hadoop.security.UserGroupInformation} currently needed by
-051 * HBase, but can be extended as needs change.
-052 * </p>
-053 */
-054@InterfaceAudience.Public
-055public abstract class User {
-056  public static final String HBASE_SECURITY_CONF_KEY =
-057      "hbase.security.authentication";
-058  public static final String HBASE_SECURITY_AUTHORIZATION_CONF_KEY =
-059      "hbase.security.authorization";
-060
-061  protected UserGroupInformation ugi;
+043import org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache;
+044
+045/**
+046 * Wrapper to abstract out usage of user and group information in HBase.
+047 *
+048 * <p>
+049 * This class provides a common interface for interacting with user and group
+050 * information across changing APIs in different versions of Hadoop.  It only
+051 * provides access to the common set of functionality in
+052 * {@link org.apache.hadoop.security.UserGroupInformation} currently needed by
+053 * HBase, but can be extended as needs change.
+054 * </p>
+055 */
+056@InterfaceAudience.Public
+057public abstract class User {
+058  public static final String HBASE_SECURITY_CONF_KEY =
+059      "hbase.security.authentication";
+060  public static final String HBASE_SECURITY_AUTHORIZATION_CONF_KEY =
+061      "hbase.security.authorization";
 062
-063  public UserGroupInformation getUGI() {
-064    return ugi;
-065  }
-066
-067  /**
-068   * Returns the full user name.  For Kerberos principals this will include
-069   * the host and realm portions of the principal name.
-070   *
-071   * @return User full name.
-072   */
-073  public String getName() {
-074    return ugi.getUserName();
-075  }
-076
-077  /**
-078   * Returns the list of groups of which this user is a member.  On secure
-079   * Hadoop this returns the group information for the user as resolved on the
-080   * server.  For 0.20 based Hadoop, the group names are passed from the client.
-081   */
-082  public String[] getGroupNames() {
-083    return ugi.getGroupNames();
-084  }
-085
-086  /**
-087   * Returns the shortened version of the user name -- the portion that maps
-088   * to an operating system user name.
-089   *
-090   * @return Short name
-091   */
-092  public abstract String getShortName();
-093
-094  /**
-095   * Executes the given action within the context of this user.
-096   */
-097  public abstract <T> T runAs(PrivilegedAction<T> action);
-098
-099  /**
-100   * Executes the given action within the context of this user.
-101   */
-102  public abstract <T> T runAs(PrivilegedExceptionAction<T> action)
-103      throws IOException, InterruptedException;
-104
-105  /**
-106   * Returns the Token of the specified kind associated with this user,
-107   * or null if the Token is not present.
-108   *
-109   * @param kind the kind of token
-110   * @param service service on which the token is supposed to
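
User is a thin wrapper over Hadoop's UserGroupInformation, and its runAs overloads delegate to UGI's doAs so that the supplied action executes in the wrapped user's security context. A hedged sketch of that underlying pattern, assuming an insecure (simple authentication) setup where createRemoteUser suffices and "alice" is a made-up short name:

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

public class RunAsSketch {
  public static void main(String[] args) throws Exception {
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("alice");
    String who = ugi.doAs(new PrivilegedExceptionAction<String>() {
      @Override
      public String run() throws Exception {
        // Everything here runs in alice's security context.
        return UserGroupInformation.getCurrentUser().getUserName();
      }
    });
    System.out.println("ran as: " + who); // prints "ran as: alice"
  }
}

On a Kerberos-secured cluster the identity would instead come from loginUserFromKeytab or the current login, which is exactly the variation this wrapper class exists to hide.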

[03/30] hbase-site git commit: Published site at 59867eeeebd28fcc49f338ef36769fb6a9bff4dc.

2018-07-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/67e3bccd/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
index 6cd6a17..85dd23b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
@@ -2119,6526 +2119,6532 @@
 2111  }
 2112
 2113      status = TaskMonitor.get().createStatus("Compacting " + store + " in " + this);
-2114      if (this.closed.get()) {
-2115        String msg = "Skipping compaction on " + this + " because closed";
-2116        LOG.debug(msg);
-2117        status.abort(msg);
-2118        return false;
-2119      }
-2120      boolean wasStateSet = false;
-2121      try {
-2122        synchronized (writestate) {
-2123          if (writestate.writesEnabled) {
-2124            wasStateSet = true;
-2125            writestate.compacting.incrementAndGet();
-2126          } else {
-2127            String msg = "NOT compacting region " + this + ". Writes disabled.";
-2128            LOG.info(msg);
-2129            status.abort(msg);
-2130            return false;
-2131          }
-2132        }
-2133        LOG.info("Starting compaction of {} in {}{}", store, this,
-2134            (compaction.getRequest().isOffPeak() ? " as an off-peak compaction" : ""));
-2135        doRegionCompactionPrep();
-2136        try {
-2137          status.setStatus("Compacting store " + store);
-2138          // We no longer need to cancel the request on the way out of this
-2139          // method because Store#compact will clean up unconditionally
-2140          requestNeedsCancellation = false;
-2141          store.compact(compaction, throughputController, user);
-2142        } catch (InterruptedIOException iioe) {
-2143          String msg = "compaction interrupted";
-2144          LOG.info(msg, iioe);
-2145          status.abort(msg);
-2146          return false;
-2147        }
-2148      } finally {
-2149        if (wasStateSet) {
-2150          synchronized (writestate) {
-2151            writestate.compacting.decrementAndGet();
-2152            if (writestate.compacting.get() <= 0) {
-2153              writestate.notifyAll();
-2154            }
-2155          }
-2156        }
-2157      }
-2158      status.markComplete("Compaction complete");
-2159      return true;
-2160    } finally {
-2161      if (requestNeedsCancellation) store.cancelRequestedCompaction(compaction);
-2162      if (status != null) status.cleanup();
-2163    }
-2164  }
-2165
-2166  /**
-2167   * Flush the cache.
-2168   *
-2169   * <p>When this method is called the cache will be flushed unless:
-2170   * <ol>
-2171   *   <li>the cache is empty</li>
-2172   *   <li>the region is closed.</li>
-2173   *   <li>a flush is already in progress</li>
-2174   *   <li>writes are disabled</li>
-2175   * </ol>
-2176   *
-2177   * <p>This method may block for some time, so it should not be called from a
-2178   * time-sensitive thread.
-2179   * @param force whether we want to force a flush of all stores
-2180   * @return FlushResult indicating whether the flush was successful or not and if
-2181   * the region needs compacting
-2182   *
-2183   * @throws IOException general io exceptions
-2184   * because a snapshot was not properly persisted.
-2185   */
-2186  // TODO HBASE-18905. We might have to expose a requestFlush API for CPs
-2187  public FlushResult flush(boolean force) throws IOException {
-2188    return flushcache(force, false, FlushLifeCycleTracker.DUMMY);
-2189  }
-2190
-2191  public interface FlushResult {
-2192    enum Result {
-2193      FLUSHED_NO_COMPACTION_NEEDED,
-2194      FLUSHED_COMPACTION_NEEDED,
-2195      // Special case where a flush didn't run because there's nothing in the memstores. Used when
-2196      // bulk loading to know when we can still load even if a flush didn't happen.
-2197      CANNOT_FLUSH_MEMSTORE_EMPTY,
-2198      CANNOT_FLUSH
-2199    }
-2200
-2201    /** @return the detailed result code */
-2202    Result getResult();
-2203
-2204    /** @return true if the memstores were flushed, else false */
-2205    boolean isFlushSucceeded();
-2206
-2207    /** @return True if the flush requested a compaction, else false */
-2208    boolean isCompactionNeeded();
-2209  }
+2114      status.enableStatusJournal(false);
+2115      if (this.closed.get()) {
+2116        String msg = "Skipping compaction on " + this + " because closed";
+2117        LOG.debug(msg);
+2118        status.abort(msg);
+2119        return false;
+2120      }
+2121      boolean wasStateSet = false;
+2122      try {
+2123        synchronized (writestate) {
+2124          if (writestate.writesEnabled) {
+2125            wasStateSet = true;
+2126
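
The compaction path above guards writestate.compacting with a monitor: increment before the compaction starts, decrement in a finally block, and notifyAll when the count drops back to zero so that threads waiting for compactions to drain can proceed. A minimal sketch of that counter pattern (class and method names are illustrative, not HBase's actual fields):

/** Sketch of a monitor-guarded in-flight counter with drain support. */
public class CompactionGate {
  private int compacting = 0;
  private boolean writesEnabled = true;

  public synchronized boolean tryStart() {
    if (!writesEnabled) {
      return false;          // caller must skip the compaction entirely
    }
    compacting++;
    return true;
  }

  public synchronized void finish() {
    if (--compacting <= 0) {
      notifyAll();           // wake threads blocked in awaitQuiescence()
    }
  }

  public synchronized void awaitQuiescence() throws InterruptedException {
    while (compacting > 0) {
      wait();
    }
  }
}

The caller pairs tryStart() with finish() in try/finally, mirroring the wasStateSet bookkeeping in the diff.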

[03/30] hbase-site git commit: Published site at .

2018-01-15 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e80e3339/testdevapidocs/src-html/org/apache/hadoop/hbase/TestZooKeeper.MockLoadBalancer.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/TestZooKeeper.MockLoadBalancer.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/TestZooKeeper.MockLoadBalancer.html
index 49cd7ba..6ba24e0 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/TestZooKeeper.MockLoadBalancer.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/TestZooKeeper.MockLoadBalancer.html
@@ -28,452 +28,273 @@
 020
 021import static org.junit.Assert.assertEquals;
 022import static org.junit.Assert.assertFalse;
-023import static org.junit.Assert.assertNotNull;
-024import static org.junit.Assert.assertNull;
-025import static org.junit.Assert.assertTrue;
-026import static org.junit.Assert.fail;
-027
-028import java.io.IOException;
-029import java.util.List;
-030import java.util.Map;
-031
-032import org.apache.hadoop.conf.Configuration;
-033import org.apache.hadoop.hbase.client.Admin;
-034import org.apache.hadoop.hbase.client.Put;
-035import org.apache.hadoop.hbase.client.RegionInfo;
-036import org.apache.hadoop.hbase.client.Result;
-037import org.apache.hadoop.hbase.client.ResultScanner;
-038import org.apache.hadoop.hbase.client.Scan;
-039import org.apache.hadoop.hbase.client.Table;
-040import org.apache.hadoop.hbase.coordination.ZkSplitLogWorkerCoordination;
-041import org.apache.hadoop.hbase.master.HMaster;
-042import org.apache.hadoop.hbase.master.LoadBalancer;
-043import org.apache.hadoop.hbase.master.balancer.SimpleLoadBalancer;
-044import org.apache.hadoop.hbase.testclassification.LargeTests;
-045import org.apache.hadoop.hbase.testclassification.MiscTests;
-046import org.apache.hadoop.hbase.util.Bytes;
-047import org.apache.hadoop.hbase.util.FSUtils;
-048import org.apache.hadoop.hbase.util.Threads;
-049import org.apache.hadoop.hbase.zookeeper.EmptyWatcher;
-050import org.apache.hadoop.hbase.zookeeper.ZKConfig;
-051import org.apache.hadoop.hbase.zookeeper.ZKUtil;
-052import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-053import org.apache.zookeeper.CreateMode;
-054import org.apache.zookeeper.KeeperException;
-055import org.apache.zookeeper.ZooDefs;
-056import org.apache.zookeeper.ZooKeeper;
-057import org.apache.zookeeper.data.ACL;
-058import org.apache.zookeeper.data.Stat;
-059import org.junit.After;
-060import org.junit.AfterClass;
-061import org.junit.Before;
-062import org.junit.BeforeClass;
-063import org.junit.Rule;
-064import org.junit.Test;
-065import org.junit.experimental.categories.Category;
-066import org.junit.rules.TestName;
-067import org.slf4j.Logger;
-068import org.slf4j.LoggerFactory;
-069
-070
-071@Category({MiscTests.class, LargeTests.class})
-072public class TestZooKeeper {
-073  private static final Logger LOG = LoggerFactory.getLogger(TestZooKeeper.class);
-074
-075  private final static HBaseTestingUtility
-076      TEST_UTIL = new HBaseTestingUtility();
-077
-078  @Rule
-079  public TestName name = new TestName();
-080
-081  /**
-082   * @throws java.lang.Exception
-083   */
-084  @BeforeClass
-085  public static void setUpBeforeClass() throws Exception {
-086    // Test we can first start the ZK cluster by itself
-087    Configuration conf = TEST_UTIL.getConfiguration();
-088    TEST_UTIL.startMiniDFSCluster(2);
-089    TEST_UTIL.startMiniZKCluster();
-090    conf.setInt(HConstants.ZK_SESSION_TIMEOUT, 1000);
-091    conf.setClass(HConstants.HBASE_MASTER_LOADBALANCER_CLASS, MockLoadBalancer.class,
-092        LoadBalancer.class);
-093  }
-094
-095  /**
-096   * @throws java.lang.Exception
-097   */
-098  @AfterClass
-099  public static void tearDownAfterClass() throws Exception {
-100    TEST_UTIL.shutdownMiniCluster();
-101  }
-102
-103  /**
-104   * @throws java.lang.Exception
-105   */
-106  @Before
-107  public void setUp() throws Exception {
-108    TEST_UTIL.startMiniHBaseCluster(2, 2);
-109  }
-110
-111  @After
-112  public void after() throws Exception {
-113    try {
-114      // Some regionserver could fail to delete its znode.
-115      // So shutdown could hang. Let's kill them all instead.
-116      TEST_UTIL.getHBaseCluster().killAll();
-117
-118      // Still need to clean things up
-119      TEST_UTIL.shutdownMiniHBaseCluster();
-120    } finally {
-121      TEST_UTIL.getTestFileSystem().delete(FSUtils.getRootDir(TEST_UTIL.getConfiguration()), true);
-122      ZKUtil.deleteNodeRecursively(TEST_UTIL.getZooKeeperWatcher(), "/hbase");
-123    }
-124  }
-125
-126  @Test (timeout = 12)
-127  public void testRegionServerSessionExpired() throws Exception {
-128    LOG.info("Starting " + name.getMethodName());
-129    TEST_UTIL.expireRegionServerSession(0);
-130    testSanity(name.getMethodName());
-131  }
-132
-133  @Test(timeout = 30)
-134  public void testMasterSessionExpired() throws
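
The deleted scaffolding is the standard HBaseTestingUtility lifecycle: bring up mini DFS, ZooKeeper, and HBase clusters around the tests, then tear everything down. A compact sketch of that lifecycle, assuming the 2.x-era API where a single startMiniCluster() call brings up all three pieces:

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class MiniClusterLifecycleSketch {
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  @BeforeClass
  public static void setUp() throws Exception {
    // Starts ZooKeeper, DFS, a master and a regionserver in-process.
    TEST_UTIL.startMiniCluster();
  }

  @AfterClass
  public static void tearDown() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  @Test
  public void testAgainstTheCluster() throws Exception {
    // Test bodies can use TEST_UTIL.getConnection() and TEST_UTIL.getAdmin().
  }
}

TestZooKeeper splits the pieces apart (startMiniDFSCluster, startMiniZKCluster, then startMiniHBaseCluster per test) precisely so it can restart HBase between ZooKeeper session-expiry scenarios.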

[03/30] hbase-site git commit: Published site at .

2017-09-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc5c2985/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
index b5f8667..30ab0d8 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Reactor Dependency Convergence
 
@@ -650,7 +650,7 @@
 https://www.apache.org/">The Apache Software Foundation.
 All rights reserved.
 
-  Last Published: 2017-09-22
+  Last Published: 2017-09-23
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc5c2985/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
index bc6821c..dbda352 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Dependency Information
 
@@ -147,7 +147,7 @@
 https://www.apache.org/">The Apache Software Foundation.
 All rights reserved.
 
-  Last Published: 2017-09-22
+  Last Published: 2017-09-23
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc5c2985/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
index 0d35b4b..29c12a5 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Project Dependency Management
 
@@ -772,7 +772,7 @@
 https://www.apache.org/">The Apache Software Foundation.
 All rights reserved.
 
-  Last Published: 2017-09-22
+  Last Published: 2017-09-23
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc5c2985/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
index fee2450..572b931 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
About
 
@@ -119,7 +119,7 @@
 https://www.apache.org/">The Apache Software Foundation.
 All rights reserved.
 
-  Last Published: 2017-09-22
+  Last Published: 2017-09-23
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dc5c2985/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
index 47db70f..4ea3bb1 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
+++ 

[03/30] hbase-site git commit: Published site at .

2017-09-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/2bb8bd01/testdevapidocs/src-html/org/apache/hadoop/hbase/security/access/TestAccessController.TestTableDDLProcedure.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/security/access/TestAccessController.TestTableDDLProcedure.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/security/access/TestAccessController.TestTableDDLProcedure.html
index 6899321..da046b2 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/security/access/TestAccessController.TestTableDDLProcedure.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/security/access/TestAccessController.TestTableDDLProcedure.html
@@ -865,2376 +865,2351 @@
 857  }
 858
 859  @Test (timeout=18)
-860  public void testMergeRegions() throws Exception {
-861    final TableName tableName = TableName.valueOf(name.getMethodName());
-862    createTestTable(tableName);
-863    try {
-864      final List<HRegion> regions = TEST_UTIL.getHBaseCluster().findRegionsForTable(tableName);
-865      assertTrue("not enough regions: " + regions.size(), regions.size() >= 2);
-866
-867      AccessTestAction action = new AccessTestAction() {
-868        @Override
-869        public Object run() throws Exception {
-870          ACCESS_CONTROLLER.preMerge(ObserverContext.createAndPrepare(RSCP_ENV, null),
-871            regions.get(0), regions.get(1));
-872          return null;
-873        }
-874      };
-875
-876      verifyAllowed(action, SUPERUSER, USER_ADMIN, USER_OWNER, USER_GROUP_ADMIN);
-877      verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ,
-878        USER_GROUP_WRITE, USER_GROUP_CREATE);
-879    } finally {
-880      deleteTable(TEST_UTIL, tableName);
-881    }
-882  }
-883
-884  @Test (timeout=18)
-885  public void testFlush() throws Exception {
-886    AccessTestAction action = new AccessTestAction() {
-887      @Override
-888      public Object run() throws Exception {
-889        ACCESS_CONTROLLER.preFlush(ObserverContext.createAndPrepare(RCP_ENV, null));
-890        return null;
-891      }
-892    };
-893
-894    verifyAllowed(action, SUPERUSER, USER_ADMIN, USER_OWNER, USER_CREATE, USER_GROUP_CREATE,
-895      USER_GROUP_ADMIN);
-896    verifyDenied(action, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ, USER_GROUP_WRITE);
-897  }
-898
-899  @Test (timeout=18)
-900  public void testCompact() throws Exception {
-901    AccessTestAction action = new AccessTestAction() {
-902      @Override
-903      public Object run() throws Exception {
-904        ACCESS_CONTROLLER.preCompact(ObserverContext.createAndPrepare(RCP_ENV, null), null, null,
-905          ScanType.COMPACT_RETAIN_DELETES, null);
-906        return null;
-907      }
-908    };
-909
-910    verifyAllowed(action, SUPERUSER, USER_ADMIN, USER_OWNER, USER_CREATE, USER_GROUP_CREATE,
-911      USER_GROUP_ADMIN);
-912    verifyDenied(action, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ, USER_GROUP_WRITE);
-913  }
-914
-915  private void verifyRead(AccessTestAction action) throws Exception {
-916    verifyAllowed(action, SUPERUSER, USER_ADMIN, USER_OWNER, USER_CREATE, USER_RW, USER_RO,
-917      USER_GROUP_READ);
-918    verifyDenied(action, USER_NONE, USER_GROUP_CREATE, USER_GROUP_ADMIN, USER_GROUP_WRITE);
-919  }
-920
-921  private void verifyReadWrite(AccessTestAction action) throws Exception {
-922    verifyAllowed(action, SUPERUSER, USER_ADMIN, USER_OWNER, USER_CREATE, USER_RW);
-923    verifyDenied(action, USER_NONE, USER_RO, USER_GROUP_ADMIN, USER_GROUP_CREATE, USER_GROUP_READ,
-924      USER_GROUP_WRITE);
-925  }
-926
-927  @Test (timeout=18)
-928  public void testRead() throws Exception {
-929    // get action
-930    AccessTestAction getAction = new AccessTestAction() {
-931      @Override
-932      public Object run() throws Exception {
-933        Get g = new Get(TEST_ROW);
-934        g.addFamily(TEST_FAMILY);
-935        try(Connection conn = ConnectionFactory.createConnection(conf);
-936            Table t = conn.getTable(TEST_TABLE)) {
-937          t.get(g);
-938        }
-939        return null;
-940      }
-941    };
-942    verifyRead(getAction);
-943
-944    // action for scanning
-945    AccessTestAction scanAction = new AccessTestAction() {
-946      @Override
-947      public Object run() throws Exception {
-948        Scan s = new Scan();
-949        s.addFamily(TEST_FAMILY);
-950        try(Connection conn = ConnectionFactory.createConnection(conf);
-951            Table table = conn.getTable(TEST_TABLE)) {
-952          ResultScanner scanner = table.getScanner(s);
-953          try {
-954            for (Result r = scanner.next(); r != null; r = scanner.next()) {
-955              // do nothing
-956            }
-957          } finally {
-958            scanner.close();
-959          }
-960        }
-961        return null;
-962
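
Each removed test wraps a coprocessor pre-hook call in an AccessTestAction and feeds it to verifyAllowed/verifyDenied for a matrix of principals. A rough sketch of the shape of that harness; in the real SecureTestUtil the action extends PrivilegedExceptionAction, is executed as each named user, and denial specifically means AccessDeniedException, all of which is simplified away here:

import java.util.concurrent.Callable;

public class AccessCheckSketch {
  interface AccessTestAction extends Callable<Object> {}

  static void verifyAllowed(AccessTestAction action) throws Exception {
    action.call(); // must complete without being denied
  }

  static void verifyDenied(AccessTestAction action) {
    try {
      action.call();
      throw new AssertionError("expected the action to be denied");
    } catch (Exception expected) {
      // the real harness asserts this is an AccessDeniedException
    }
  }

  public static void main(String[] args) throws Exception {
    AccessTestAction noop = () -> null;
    verifyAllowed(noop);
  }
}
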

[03/30] hbase-site git commit: Published site at .

2017-08-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/495ddb86/testdevapidocs/src-html/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.html
index baa975f..36e14c0 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.html
@@ -25,45 +25,45 @@
 017 */
 018package org.apache.hadoop.hbase.filter;
 019
-020import static org.junit.Assert.assertEquals;
-021
-022import java.io.IOException;
-023import java.nio.ByteBuffer;
-024import java.util.ArrayList;
-025import java.util.List;
-026
-027import org.apache.commons.logging.Log;
-028import org.apache.commons.logging.LogFactory;
-029import org.apache.hadoop.conf.Configuration;
-030import org.apache.hadoop.hbase.Cell;
-031import org.apache.hadoop.hbase.CellUtil;
-032import org.apache.hadoop.hbase.HBaseTestingUtility;
-033import org.apache.hadoop.hbase.HConstants;
-034import org.apache.hadoop.hbase.TableName;
-035import org.apache.hadoop.hbase.client.Durability;
-036import org.apache.hadoop.hbase.client.Put;
-037import org.apache.hadoop.hbase.client.Result;
-038import org.apache.hadoop.hbase.client.ResultScanner;
-039import org.apache.hadoop.hbase.client.Scan;
-040import org.apache.hadoop.hbase.client.Table;
-041import org.apache.hadoop.hbase.filter.FilterList.Operator;
-042import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
-043import org.apache.hadoop.hbase.regionserver.HRegion;
-044import org.apache.hadoop.hbase.regionserver.RegionScanner;
-045import org.apache.hadoop.hbase.testclassification.FilterTests;
-046import org.apache.hadoop.hbase.testclassification.MediumTests;
-047import org.apache.hadoop.hbase.util.Bytes;
-048import org.apache.hadoop.hbase.util.Pair;
-049import org.junit.After;
-050import org.junit.AfterClass;
-051import org.junit.Before;
-052import org.junit.BeforeClass;
-053import org.junit.Rule;
-054import org.junit.Test;
-055import org.junit.experimental.categories.Category;
-056
-057import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-058import org.junit.rules.TestName;
+020import org.apache.commons.logging.Log;
+021import org.apache.commons.logging.LogFactory;
+022import org.apache.hadoop.conf.Configuration;
+023import org.apache.hadoop.hbase.Cell;
+024import org.apache.hadoop.hbase.CellUtil;
+025import org.apache.hadoop.hbase.HBaseTestingUtility;
+026import org.apache.hadoop.hbase.HConstants;
+027import org.apache.hadoop.hbase.TableName;
+028import org.apache.hadoop.hbase.client.Durability;
+029import org.apache.hadoop.hbase.client.Put;
+030import org.apache.hadoop.hbase.client.Result;
+031import org.apache.hadoop.hbase.client.ResultScanner;
+032import org.apache.hadoop.hbase.client.Scan;
+033import org.apache.hadoop.hbase.client.Table;
+034import org.apache.hadoop.hbase.filter.FilterList.Operator;
+035import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
+036import org.apache.hadoop.hbase.regionserver.HRegion;
+037import org.apache.hadoop.hbase.regionserver.RegionScanner;
+038import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+039import org.apache.hadoop.hbase.testclassification.FilterTests;
+040import org.apache.hadoop.hbase.testclassification.MediumTests;
+041import org.apache.hadoop.hbase.util.Bytes;
+042import org.apache.hadoop.hbase.util.Pair;
+043import org.junit.After;
+044import org.junit.AfterClass;
+045import org.junit.Before;
+046import org.junit.BeforeClass;
+047import org.junit.Rule;
+048import org.junit.Test;
+049import org.junit.experimental.categories.Category;
+050import org.junit.rules.TestName;
+051
+052import java.io.IOException;
+053import java.nio.ByteBuffer;
+054import java.util.ArrayList;
+055import java.util.Arrays;
+056import java.util.List;
+057
+058import static org.junit.Assert.assertEquals;
 059
 060/**
 061 */
@@ -148,311 +148,320 @@
 140    List<Pair<byte[], byte[]>> data = new ArrayList<>();
 141    byte[] fuzzyKey = Bytes.toBytesBinary("\\x9B\\x00\\x044e");
 142    byte[] mask = new byte[] { 0, 0, 0, 0, 0 };
-143    data.add(new Pair(fuzzyKey, mask));
-144    FuzzyRowFilter filter = new FuzzyRowFilter(data);
-145
-146    Scan scan = new Scan();
-147    scan.setFilter(filter);
-148
-149    ResultScanner scanner = ht.getScanner(scan);
-150    int total = 0;
-151    while (scanner.next() != null) {
-152      total++;
-153    }
-154    assertEquals(2, total);
-155    TEST_UTIL.deleteTable(TableName.valueOf(name.getMethodName()));
-156  }
-157
-158  @Test
-159  public void testHBASE14782() throws IOException
-160  {
-161    String cf = "f";
-162    String cq = "q";
+143
+144    // copy
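
The removed test body builds a FuzzyRowFilter from (row pattern, mask) pairs: a 0 in the mask pins the corresponding pattern byte, while a 1 lets that position match any byte. A hedged usage sketch (table layout, pattern, and mask are made up for illustration):

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

public class FuzzyScanSketch {
  /** Scan rows shaped like "??_key_1" where the first two bytes are arbitrary. */
  public static Scan buildScan() {
    List<Pair<byte[], byte[]>> fuzzyKeys = new ArrayList<>();
    byte[] rowPattern = Bytes.toBytes("??_key_1");
    byte[] mask = new byte[] { 1, 1, 0, 0, 0, 0, 0, 0 }; // 1 = any byte, 0 = must match
    fuzzyKeys.add(new Pair<>(rowPattern, mask));
    Scan scan = new Scan();
    scan.setFilter(new FuzzyRowFilter(fuzzyKeys));
    return scan;
  }
}

This is what makes the filter useful for composite keys: fixed fields deep in the row key can be matched without constraining the leading prefix.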

[03/30] hbase-site git commit: Published site at .

2017-08-18 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cef8af03/testdevapidocs/index-all.html
--
diff --git a/testdevapidocs/index-all.html b/testdevapidocs/index-all.html
index 42866a5..e40d3b0 100644
--- a/testdevapidocs/index-all.html
+++ b/testdevapidocs/index-all.html
@@ -20321,6 +20321,8 @@
 
 LOG - Static variable in class org.apache.hadoop.hbase.zookeeper.TestZooKeeperNodeTracker.TestingZKListener
 
+LOG - Static variable in class org.apache.hadoop.hbase.zookeeper.TestZooKeeperWatcher
+
 LOG_FILES - Variable in class org.apache.hadoop.hbase.util.ProcessBasedLocalHBaseCluster.LocalDaemonLogTailer
 
 LOG_PATH_FORMAT_RE - Static variable in class org.apache.hadoop.hbase.util.ProcessBasedLocalHBaseCluster
@@ -41786,6 +41788,8 @@
 
 testConnectionDefaultUsesCodec() - Method in class org.apache.hadoop.hbase.client.TestFromClientSide3
 
+testConnectionEvent() - Method in class org.apache.hadoop.hbase.zookeeper.TestZooKeeperWatcher
+
 testConnectionExhaustion() - Method in class org.apache.hadoop.hbase.mapred.TestTableOutputFormatConnectionExhaust
 
 testConnectionIdle() - Method in class org.apache.hadoop.hbase.client.TestHCM

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cef8af03/testdevapidocs/org/apache/hadoop/hbase/RESTApiClusterManager.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/RESTApiClusterManager.html 
b/testdevapidocs/org/apache/hadoop/hbase/RESTApiClusterManager.html
index e46a4ac..58e52d6 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/RESTApiClusterManager.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/RESTApiClusterManager.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class RESTApiClusterManager
+public class RESTApiClusterManager
 extends org.apache.hadoop.conf.Configured
 implements ClusterManager
 A ClusterManager implementation designed to control Cloudera Manager (http://www.cloudera.com)
@@ -193,7 +193,7 @@
 API_VERSION
 
-private com.sun.jersey.api.client.Client
+private javax.ws.rs.client.Client
 client
 
@@ -439,7 +439,7 @@
 
 REST_API_CLUSTER_MANAGER_HOSTNAME
-private static final String REST_API_CLUSTER_MANAGER_HOSTNAME
+private static final String REST_API_CLUSTER_MANAGER_HOSTNAME
 
 See Also:
 Constant Field Values
@@ -452,7 +452,7 @@
 
 REST_API_CLUSTER_MANAGER_USERNAME
-private static final String REST_API_CLUSTER_MANAGER_USERNAME
+private static final String REST_API_CLUSTER_MANAGER_USERNAME
 
 See Also:
 Constant Field Values
@@ -465,7 +465,7 @@
 
 REST_API_CLUSTER_MANAGER_PASSWORD
-private static final String REST_API_CLUSTER_MANAGER_PASSWORD
+private static final String REST_API_CLUSTER_MANAGER_PASSWORD
 
 See Also:
 Constant Field Values
@@ -478,7 +478,7 @@
 
 REST_API_CLUSTER_MANAGER_CLUSTER_NAME
-private static final String REST_API_CLUSTER_MANAGER_CLUSTER_NAME
+private static final String REST_API_CLUSTER_MANAGER_CLUSTER_NAME
 
 See Also:
 Constant Field Values
@@ -491,7 +491,7 @@
 
 DEFAULT_SERVER_HOSTNAME
-private static final String DEFAULT_SERVER_HOSTNAME
+private static final String DEFAULT_SERVER_HOSTNAME
 
 See Also:
 Constant Field Values
@@ -504,7 +504,7 @@
 
 DEFAULT_SERVER_USERNAME
-private static final String DEFAULT_SERVER_USERNAME
+private static final String DEFAULT_SERVER_USERNAME
 
 See Also:
 Constant Field Values
@@ -517,7
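
The substantive change in this fragment is the client field's type moving from the proprietary com.sun.jersey.api.client.Client to the standard javax.ws.rs.client.Client. A minimal sketch of the standard JAX-RS client the new field type implies (host, port, and path are hypothetical, loosely modeled on the Cloudera Manager REST endpoint this class targets):

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;

public class RestClientSketch {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    try {
      String body = client.target("http://cm-host:7180/api/v6/clusters")
          .request(MediaType.APPLICATION_JSON)
          .get(String.class);
      System.out.println(body);
    } finally {
      client.close(); // JAX-RS clients hold pooled connections
    }
  }
}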

[03/30] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-19 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7f23ee04/hbase-archetypes/hbase-archetype-builder/project-summary.html
--
diff --git a/hbase-archetypes/hbase-archetype-builder/project-summary.html 
b/hbase-archetypes/hbase-archetype-builder/project-summary.html
index fe08450..d04c16d 100644
--- a/hbase-archetypes/hbase-archetype-builder/project-summary.html
+++ b/hbase-archetypes/hbase-archetype-builder/project-summary.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-06-17
+Last Published: 2017-06-19
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Archetype builder

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7f23ee04/hbase-archetypes/hbase-archetype-builder/source-repository.html
--
diff --git a/hbase-archetypes/hbase-archetype-builder/source-repository.html 
b/hbase-archetypes/hbase-archetype-builder/source-repository.html
index c1a2728..34e200c 100644
--- a/hbase-archetypes/hbase-archetype-builder/source-repository.html
+++ b/hbase-archetypes/hbase-archetype-builder/source-repository.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-06-17
+Last Published: 2017-06-19
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Archetype builder

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7f23ee04/hbase-archetypes/hbase-archetype-builder/team-list.html
--
diff --git a/hbase-archetypes/hbase-archetype-builder/team-list.html 
b/hbase-archetypes/hbase-archetype-builder/team-list.html
index d8fffba..d951af6 100644
--- a/hbase-archetypes/hbase-archetype-builder/team-list.html
+++ b/hbase-archetypes/hbase-archetype-builder/team-list.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-06-17
+Last Published: 2017-06-19
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Archetype builder

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7f23ee04/hbase-archetypes/hbase-client-project/checkstyle.html
--
diff --git a/hbase-archetypes/hbase-client-project/checkstyle.html 
b/hbase-archetypes/hbase-client-project/checkstyle.html
index 2fdd30b..3b6f0ee 100644
--- a/hbase-archetypes/hbase-client-project/checkstyle.html
+++ b/hbase-archetypes/hbase-client-project/checkstyle.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-06-17
+Last Published: 2017-06-19
   | Version: 
3.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7f23ee04/hbase-archetypes/hbase-client-project/dependencies.html
--
diff --git a/hbase-archetypes/hbase-client-project/dependencies.html 
b/hbase-archetypes/hbase-client-project/dependencies.html
index 81527b0..fdaeedc 100644
--- a/hbase-archetypes/hbase-client-project/dependencies.html
+++ b/hbase-archetypes/hbase-client-project/dependencies.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-06-17
+Last Published: 2017-06-19
   | Version: 

[03/30] hbase-site git commit: Published site at 28cd48b673ca743d193874b2951bc995699e8e89.

2016-02-24 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/89b638a4/xref/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html
--
diff --git a/xref/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html 
b/xref/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html
index e90a138..0549d41 100644
--- a/xref/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html
+++ b/xref/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html
@@ -38,203 +38,219 @@
 28  import org.apache.hadoop.hbase.HConstants;
 29  import org.apache.hadoop.hbase.classification.InterfaceAudience;
 30  import org.apache.hadoop.hbase.classification.InterfaceStability;
-31  import org.apache.hadoop.hbase.util.BoundedPriorityBlockingQueue;
-32  
-33  /**
-34   * A scheduler that maintains isolated handler pools for general,
-35   * high-priority, and replication requests.
-36   */
-37  @InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
-38  @InterfaceStability.Evolving
-39  public class SimpleRpcScheduler extends RpcScheduler {
-40    private static final Log LOG = LogFactory.getLog(SimpleRpcScheduler.class);
-41  
-42    public static final String CALL_QUEUE_READ_SHARE_CONF_KEY =
-43        "hbase.ipc.server.callqueue.read.ratio";
-44    public static final String CALL_QUEUE_SCAN_SHARE_CONF_KEY =
-45        "hbase.ipc.server.callqueue.scan.ratio";
-46    public static final String CALL_QUEUE_HANDLER_FACTOR_CONF_KEY =
-47        "hbase.ipc.server.callqueue.handler.factor";
-48  
-49    /** If set to 'deadline', uses a priority queue and deprioritize long-running scans */
-50    public static final String CALL_QUEUE_TYPE_CONF_KEY = "hbase.ipc.server.callqueue.type";
-51    public static final String CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE = "deadline";
-52    public static final String CALL_QUEUE_TYPE_FIFO_CONF_VALUE = "fifo";
-53  
-54    /** max delay in msec used to bound the deprioritized requests */
-55    public static final String QUEUE_MAX_CALL_DELAY_CONF_KEY
-56        = "hbase.ipc.server.queue.max.call.delay";
-57  
-58    /**
-59     * Comparator used by the "normal callQueue" if DEADLINE_CALL_QUEUE_CONF_KEY is set to true.
-60     * It uses the calculated "deadline" e.g. to deprioritize long-running job
-61     *
-62     * If multiple requests have the same deadline BoundedPriorityBlockingQueue will order them in
-63     * FIFO (first-in-first-out) manner.
-64     */
-65    private static class CallPriorityComparator implements Comparator<CallRunner> {
-66      private final static int DEFAULT_MAX_CALL_DELAY = 5000;
-67  
-68      private final PriorityFunction priority;
-69      private final int maxDelay;
-70  
-71      public CallPriorityComparator(final Configuration conf, final PriorityFunction priority) {
-72        this.priority = priority;
-73        this.maxDelay = conf.getInt(QUEUE_MAX_CALL_DELAY_CONF_KEY, DEFAULT_MAX_CALL_DELAY);
-74      }
-75  
-76      @Override
-77      public int compare(CallRunner a, CallRunner b) {
-78        RpcServer.Call callA = a.getCall();
-79        RpcServer.Call callB = b.getCall();
-80        long deadlineA = priority.getDeadline(callA.getHeader(), callA.param);
-81        long deadlineB = priority.getDeadline(callB.getHeader(), callB.param);
-82        deadlineA = callA.timestamp + Math.min(deadlineA, maxDelay);
-83        deadlineB = callB.timestamp + Math.min(deadlineB, maxDelay);
-84        return (int)(deadlineA - deadlineB);
-85      }
-86    }
-87  
-88    private int port;
-89    private final PriorityFunction priority;
-90    private final RpcExecutor callExecutor;
-91    private final RpcExecutor priorityExecutor;
-92    private final RpcExecutor replicationExecutor;
-93  
-94    /** What level a high priority call is at. */
-95    private final int highPriorityLevel;
-96  
-97    private Abortable abortable = null;
-98  
-99    /**
-100    * @param conf
-101    * @param handlerCount the number of handler threads that will be used to process calls
-102    * @param priorityHandlerCount How many threads for priority handling.
-103    * @param replicationHandlerCount How many threads for replication handling.
-104    * @param highPriorityLevel
-105    * @param priority Function to extract request priority.
-106    */
-107   public SimpleRpcScheduler(
-108       Configuration conf,
-109       int handlerCount,
-110       int priorityHandlerCount,
-111       int replicationHandlerCount,
-112       PriorityFunction priority,
-113       Abortable server,
-114       int highPriorityLevel) {
-115     int maxQueueLength = conf.getInt("hbase.ipc.server.max.callqueue.length",
-116         handlerCount * RpcServer.DEFAULT_MAX_CALLQUEUE_LENGTH_PER_HANDLER);
-117     this.priority = priority;
-118     this.highPriorityLevel = highPriorityLevel;
-119     this.abortable = server;
-120 
-121     String callQueueType =
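
CallPriorityComparator above orders calls by arrival time plus a per-call deadline that is clamped to maxDelay, so a deprioritized request (for example a long-running scan) can be pushed back by at most hbase.ipc.server.queue.max.call.delay milliseconds. A standalone sketch of that ordering with illustrative types; using Long.compare also sidesteps the integer-overflow risk in the original (int)(deadlineA - deadlineB) cast:

import java.util.Comparator;

public class DeadlineOrderingSketch {
  static class Call {
    final long timestamp; // arrival time, ms
    final long deadline;  // scheduler-assigned extra delay, ms
    Call(long timestamp, long deadline) {
      this.timestamp = timestamp;
      this.deadline = deadline;
    }
  }

  /** Earlier effective deadline first; equal deadlines keep FIFO order in the queue. */
  static Comparator<Call> byDeadline(final long maxDelay) {
    return new Comparator<Call>() {
      @Override
      public int compare(Call a, Call b) {
        long da = a.timestamp + Math.min(a.deadline, maxDelay);
        long db = b.timestamp + Math.min(b.deadline, maxDelay);
        return Long.compare(da, db);
      }
    };
  }
}

Paired with BoundedPriorityBlockingQueue, ties fall back to insertion order, which is what gives the FIFO behavior the source comment describes.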