[23/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e9db7c5d/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.KeyBeforeConcatenatedLists.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.KeyBeforeConcatenatedLists.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.KeyBeforeConcatenatedLists.html
index b09e20b..3c088c8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.KeyBeforeConcatenatedLists.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.KeyBeforeConcatenatedLists.html
@@ -155,937 +155,942 @@
 147  }
 148
 149  @Override
-150  public void insertNewFiles(Collection<StoreFile> sfs) throws IOException {
-151    CompactionOrFlushMergeCopy cmc = new CompactionOrFlushMergeCopy(true);
-152    // Passing null does not cause NPE??
-153    cmc.mergeResults(null, sfs);
-154    debugDumpState("Added new files");
-155  }
-156
-157  @Override
-158  public ImmutableCollection<StoreFile> clearFiles() {
-159    ImmutableCollection<StoreFile> result = state.allFilesCached;
-160    this.state = new State();
-161    this.fileStarts.clear();
-162    this.fileEnds.clear();
-163    return result;
-164  }
-165
-166  @Override
-167  public ImmutableCollection<StoreFile> clearCompactedFiles() {
-168    ImmutableCollection<StoreFile> result = state.allCompactedFilesCached;
-169    this.state = new State();
-170    return result;
-171  }
-172
-173  @Override
-174  public int getStorefileCount() {
-175    return state.allFilesCached.size();
+150  public int getCompactedFilesCount() {
+151    return state.allCompactedFilesCached.size();
+152  }
+153
+154  @Override
+155  public void insertNewFiles(Collection<StoreFile> sfs) throws IOException {
+156    CompactionOrFlushMergeCopy cmc = new CompactionOrFlushMergeCopy(true);
+157    // Passing null does not cause NPE??
+158    cmc.mergeResults(null, sfs);
+159    debugDumpState("Added new files");
+160  }
+161
+162  @Override
+163  public ImmutableCollection<StoreFile> clearFiles() {
+164    ImmutableCollection<StoreFile> result = state.allFilesCached;
+165    this.state = new State();
+166    this.fileStarts.clear();
+167    this.fileEnds.clear();
+168    return result;
+169  }
+170
+171  @Override
+172  public ImmutableCollection<StoreFile> clearCompactedFiles() {
+173    ImmutableCollection<StoreFile> result = state.allCompactedFilesCached;
+174    this.state = new State();
+175    return result;
 176  }
 177
-178  /** See {@link StoreFileManager#getCandidateFilesForRowKeyBefore(KeyValue)}
-179   * for details on this methods. */
-180  @Override
-181  public Iterator<StoreFile> getCandidateFilesForRowKeyBefore(final KeyValue targetKey) {
-182    KeyBeforeConcatenatedLists result = new KeyBeforeConcatenatedLists();
-183    // Order matters for this call.
-184    result.addSublist(state.level0Files);
-185    if (!state.stripeFiles.isEmpty()) {
-186      int lastStripeIndex = findStripeForRow(CellUtil.cloneRow(targetKey), false);
-187      for (int stripeIndex = lastStripeIndex; stripeIndex >= 0; --stripeIndex) {
-188        result.addSublist(state.stripeFiles.get(stripeIndex));
-189      }
-190    }
-191    return result.iterator();
-192  }
-193
-194  /** See {@link StoreFileManager#getCandidateFilesForRowKeyBefore(KeyValue)} and
-195   * {@link StoreFileManager#updateCandidateFilesForRowKeyBefore(Iterator, KeyValue, Cell)}
-196   * for details on this methods. */
-197  @Override
-198  public Iterator<StoreFile> updateCandidateFilesForRowKeyBefore(
-199      Iterator<StoreFile> candidateFiles, final KeyValue targetKey, final Cell candidate) {
-200    KeyBeforeConcatenatedLists.Iterator original =
-201        (KeyBeforeConcatenatedLists.Iterator)candidateFiles;
-202    assert original != null;
-203    ArrayList<List<StoreFile>> components = original.getComponents();
-204    for (int firstIrrelevant = 0; firstIrrelevant < components.size(); ++firstIrrelevant) {
-205      StoreFile sf = components.get(firstIrrelevant).get(0);
-206      byte[] endKey = endOf(sf);
-207      // Entries are ordered as such: L0, then stripes in reverse order. We never remove
-208      // level 0; we remove the stripe, and all subsequent ones, as soon as we find the
-209      // first one that cannot possibly have better candidates.
-210      if (!isInvalid(endKey) && !isOpen(endKey)
-211          && (nonOpenRowCompare(targetKey, endKey) >= 0)) {
-212        original.removeComponents(firstIrrelevant);
-213        break;
-214      }
-215    }
-216    return original;
-217  }
-218
-219  private byte[] getSplitPoint(Collection<StoreFile> sfs) throws IOException {
-220    Optional<StoreFile> largestFile = StoreUtils.getLargestFile(sfs);
-221    return largestFile.isPresent()
-222        ? StoreUtils.getFileSplitPoint(largestFile.get(),
[23/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.CallCleanup.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.CallCleanup.html b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.CallCleanup.html
index be47cd3..5bf8c01 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.CallCleanup.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.CallCleanup.html
@@ -38,763 +38,770 @@
 030import java.nio.channels.ReadableByteChannel;
 031import java.nio.channels.WritableByteChannel;
 032import java.util.ArrayList;
-033import java.util.HashMap;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.concurrent.atomic.LongAdder;
-037
-038import org.apache.commons.logging.Log;
-039import org.apache.commons.logging.LogFactory;
-040import org.apache.hadoop.conf.Configuration;
-041import org.apache.hadoop.hbase.CallQueueTooBigException;
-042import org.apache.hadoop.hbase.CellScanner;
-043import org.apache.hadoop.hbase.DoNotRetryIOException;
-044import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-045import org.apache.hadoop.hbase.HConstants;
-046import org.apache.hadoop.hbase.Server;
-047import org.apache.hadoop.hbase.classification.InterfaceAudience;
-048import org.apache.hadoop.hbase.classification.InterfaceStability;
-049import org.apache.hadoop.hbase.conf.ConfigurationObserver;
-050import org.apache.hadoop.hbase.exceptions.RequestTooBigException;
-051import org.apache.hadoop.hbase.io.ByteBufferPool;
-052import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
-053import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-054import org.apache.hadoop.hbase.nio.ByteBuff;
-055import org.apache.hadoop.hbase.nio.MultiByteBuff;
-056import org.apache.hadoop.hbase.nio.SingleByteBuff;
-057import org.apache.hadoop.hbase.regionserver.RSRpcServices;
-058import org.apache.hadoop.hbase.security.HBaseSaslRpcServer;
-059import org.apache.hadoop.hbase.security.User;
-060import org.apache.hadoop.hbase.security.UserProvider;
-061import org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager;
-062import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
-063import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
-064import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-065import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-066import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-067import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-068import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-069import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
-070import org.apache.hadoop.hbase.util.Pair;
-071import org.apache.hadoop.security.UserGroupInformation;
-072import org.apache.hadoop.security.authorize.AuthorizationException;
-073import org.apache.hadoop.security.authorize.PolicyProvider;
-074import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
-075import org.apache.hadoop.security.token.SecretManager;
-076import org.apache.hadoop.security.token.TokenIdentifier;
-077import org.codehaus.jackson.map.ObjectMapper;
-078
-079/**
-080 * An RPC server that hosts protobuf described Services.
-081 *
-082 */
-083@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
-084@InterfaceStability.Evolving
-085public abstract class RpcServer implements RpcServerInterface,
-086    ConfigurationObserver {
-087  // LOG is being used in CallRunner and the log level is being changed in tests
-088  public static final Log LOG = LogFactory.getLog(RpcServer.class);
-089  protected static final CallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
-090      = new CallQueueTooBigException();
-091
-092  private final boolean authorize;
-093  protected boolean isSecurityEnabled;
+033import java.util.Collections;
+034import java.util.HashMap;
+035import java.util.List;
+036import java.util.Locale;
+037import java.util.Map;
+038import java.util.concurrent.atomic.LongAdder;
+039
+040import org.apache.commons.logging.Log;
+041import org.apache.commons.logging.LogFactory;
+042import org.apache.hadoop.conf.Configuration;
+043import org.apache.hadoop.hbase.CallQueueTooBigException;
+044import org.apache.hadoop.hbase.CellScanner;
+045import org.apache.hadoop.hbase.DoNotRetryIOException;
+046import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+047import org.apache.hadoop.hbase.HConstants;
+048import org.apache.hadoop.hbase.Server;
+049import org.apache.hadoop.hbase.classification.InterfaceAudience;
+050import org.apache.hadoop.hbase.classification.InterfaceStability;
+051import org.apache.hadoop.hbase.conf.ConfigurationObserver;
+052import
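
The visible changes in this hunk are only import additions (Collections, Locale). Among the retained imports, java.util.concurrent.atomic.LongAdder is worth a note: it is the JDK counter suited to hot, multi-threaded metric updates of the kind an RPC server accumulates. A small sketch of that pattern, with assumed metric names rather than RpcServer's actual fields:

import java.util.concurrent.atomic.LongAdder;

// Hypothetical sketch of the LongAdder counter pattern implied by the
// java.util.concurrent.atomic.LongAdder import above: many handler
// threads bump a counter with low contention; readers sum() on demand.
public class RpcCounterSketch {
  private final LongAdder requestCount = new LongAdder();  // assumed name
  private final LongAdder sentBytes = new LongAdder();     // assumed name

  void onRequest(int responseSize) {
    requestCount.increment();  // cheap even under heavy thread contention
    sentBytes.add(responseSize);
  }

  public static void main(String[] args) throws InterruptedException {
    RpcCounterSketch metrics = new RpcCounterSketch();
    Thread[] handlers = new Thread[4];
    for (int i = 0; i < handlers.length; ++i) {
      handlers[i] = new Thread(() -> {
        for (int r = 0; r < 1000; ++r) {
          metrics.onRequest(128);
        }
      });
      handlers[i].start();
    }
    for (Thread t : handlers) {
      t.join();
    }
    // sum() is a snapshot, not a linearizable read; fine for metrics.
    System.out.println(metrics.requestCount.sum() + " requests, "
        + metrics.sentBytes.sum() + " bytes");
  }
}

Unlike AtomicLong, LongAdder stripes its state across cells under contention, so per-request increments stay cheap while an occasional sum() pays the aggregation cost.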