[13/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-07 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1b6d8c10/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
index 9a6c30b..af6a1dd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ModifyColumnFamilyProcedureBiConsumer.html
@@ -54,1176 +54,1176 @@
 046import org.apache.commons.io.IOUtils;
 047import org.apache.commons.logging.Log;
 048import org.apache.commons.logging.LogFactory;
-049import org.apache.hadoop.hbase.HRegionInfo;
-050import org.apache.hadoop.hbase.HRegionLocation;
-051import org.apache.hadoop.hbase.MetaTableAccessor;
-052import org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
-053import org.apache.hadoop.hbase.NotServingRegionException;
-054import org.apache.hadoop.hbase.ProcedureInfo;
-055import org.apache.hadoop.hbase.RegionLocations;
-056import org.apache.hadoop.hbase.ServerName;
-057import org.apache.hadoop.hbase.NamespaceDescriptor;
-058import org.apache.hadoop.hbase.HConstants;
-059import org.apache.hadoop.hbase.TableExistsException;
-060import org.apache.hadoop.hbase.TableName;
-061import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-062import org.apache.hadoop.hbase.TableNotDisabledException;
-063import org.apache.hadoop.hbase.TableNotEnabledException;
-064import org.apache.hadoop.hbase.TableNotFoundException;
-065import org.apache.hadoop.hbase.UnknownRegionException;
-066import org.apache.hadoop.hbase.classification.InterfaceAudience;
-067import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder;
-068import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder;
-069import org.apache.hadoop.hbase.client.Scan.ReadType;
-070import org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper;
-071import org.apache.hadoop.hbase.client.replication.TableCFs;
-072import org.apache.hadoop.hbase.exceptions.DeserializationException;
-073import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-074import org.apache.hadoop.hbase.quotas.QuotaFilter;
-075import org.apache.hadoop.hbase.quotas.QuotaSettings;
-076import org.apache.hadoop.hbase.quotas.QuotaTableUtil;
-077import org.apache.hadoop.hbase.replication.ReplicationException;
-078import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-079import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-080import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-081import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-082import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-083import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
-084import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
-085import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
-086import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
-087import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
-088import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
-089import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
-090import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
-091import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-092import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest;
-093import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse;
-094import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-095import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
-096import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-097import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-098import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-099import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-100import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-101import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-102import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-103import 

[13/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-23 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e9db7c5d/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestStore.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestStore.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestStore.html
index 80e77c1..d84640c 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestStore.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestStore.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":9,"i8":10,"i9":9,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":9,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestStore
+public class TestStore
 extends Object
 Test class for the Store
 
@@ -166,9 +166,13 @@ extends Object
 TestStore.MyScannerHook
 
 
-private static class
+(package private) class
 TestStore.MyStore
 
+
+private static class
+TestStore.MyThread
+
 
 
 
@@ -322,21 +326,29 @@ extends Object
 
 
 private org.apache.hadoop.hbase.Cell
-createCell(byte[] qualifier,
+createCell(byte[] row,
+  byte[] qualifier,
   long ts,
   long sequenceId,
   byte[] value)
 
 
+private org.apache.hadoop.hbase.Cell
+createCell(byte[] qualifier,
+  long ts,
+  long sequenceId,
+  byte[] value)
+
+
 private void
 flush(int storeFilessize)
 
-
+
 private static void
 flushStore(org.apache.hadoop.hbase.regionserver.HStore store,
   long id)
 
-
+
 (package private) List<org.apache.hadoop.hbase.Cell>
 getKeyValueSet(long[] timestamps,
   int numRows,
@@ -345,34 +357,34 @@ extends Object
 Generate a list of KeyValues for testing based on given parameters
 
 
-
+
 private static long
 getLowestTimeStampFromFS(org.apache.hadoop.fs.FileSystem fs,
   Collection<org.apache.hadoop.hbase.regionserver.StoreFile> candidates)
 
-
+
 private void
 init(String methodName)
 
-
-private void
+
+private org.apache.hadoop.hbase.regionserver.Store
 init(String methodName,
   org.apache.hadoop.conf.Configuration conf)
 
-
-private void
+
+private org.apache.hadoop.hbase.regionserver.Store
 init(String methodName,
   org.apache.hadoop.conf.Configuration conf,
   org.apache.hadoop.hbase.HColumnDescriptor hcd)
 
-
+
 private org.apache.hadoop.hbase.regionserver.Store
 init(String methodName,
   org.apache.hadoop.conf.Configuration conf,
   org.apache.hadoop.hbase.HTableDescriptor htd,
   org.apache.hadoop.hbase.HColumnDescriptor hcd)
 
-
+
 private org.apache.hadoop.hbase.regionserver.Store
 init(String methodName,
   org.apache.hadoop.conf.Configuration conf,
@@ -380,131 +392,144 @@ extends Object
   org.apache.hadoop.hbase.HColumnDescriptor hcd,
   TestStore.MyScannerHook hook)
 
-
+
+private org.apache.hadoop.hbase.regionserver.Store
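For orientation, the method-summary hunks above show TestStore gaining a row-aware createCell(byte[] row, byte[] qualifier, long ts, long sequenceId, byte[] value) overload next to the original qualifier-only variant, and the init(...) helpers now returning a Store instead of void. The snippet below is a hypothetical sketch of how such a createCell pair is commonly structured, with the narrower overload delegating to the wider one; the ROW and FAMILY constants and the method bodies are illustrative assumptions, not the test's actual code.

    import java.io.IOException;

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;

    // Hypothetical helper pair mirroring the two createCell overloads listed above.
    class CreateCellSketch {
      private static final byte[] ROW = Bytes.toBytes("row");       // assumed default row
      private static final byte[] FAMILY = Bytes.toBytes("family"); // assumed column family

      // Narrow overload: keeps old call sites working by supplying a default row.
      static Cell createCell(byte[] qualifier, long ts, long sequenceId, byte[] value)
          throws IOException {
        return createCell(ROW, qualifier, ts, sequenceId, value);
      }

      // Wide overload: the caller picks the row explicitly.
      static Cell createCell(byte[] row, byte[] qualifier, long ts, long sequenceId, byte[] value)
          throws IOException {
        Cell cell = CellUtil.createCell(row, FAMILY, qualifier, ts, KeyValue.Type.Put.getCode(), value);
        CellUtil.setSequenceId(cell, sequenceId); // stamp the sequence id used by the store under test
        return cell;
      }
    }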

[13/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.SaslDigestCallbackHandler.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.SaslDigestCallbackHandler.html b/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.SaslDigestCallbackHandler.html
index 2aea531..4ff3ed5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.SaslDigestCallbackHandler.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.SaslDigestCallbackHandler.html
@@ -6,7 +6,7 @@
 
 
 
-001/*
+001/**
 002 * Licensed to the Apache Software Foundation (ASF) under one
 003 * or more contributor license agreements.  See the NOTICE file
 004 * distributed with this work for additional information
@@ -23,169 +23,225 @@
 015 * See the License for the specific language governing permissions and
 016 * limitations under the License.
 017 */
-018
-019package org.apache.hadoop.hbase.security;
-020
-021import java.io.ByteArrayInputStream;
-022import java.io.DataInputStream;
-023import java.io.IOException;
-024import java.util.Locale;
-025import java.util.Map;
-026import java.util.function.Consumer;
-027
-028import javax.security.auth.callback.Callback;
-029import javax.security.auth.callback.CallbackHandler;
-030import javax.security.auth.callback.NameCallback;
-031import javax.security.auth.callback.PasswordCallback;
-032import javax.security.auth.callback.UnsupportedCallbackException;
-033import javax.security.sasl.AuthorizeCallback;
-034import javax.security.sasl.RealmCallback;
-035
-036import org.apache.commons.logging.Log;
-037import org.apache.commons.logging.LogFactory;
-038import org.apache.hadoop.conf.Configuration;
+018package org.apache.hadoop.hbase.security;
+019
+020import java.io.ByteArrayInputStream;
+021import java.io.DataInputStream;
+022import java.io.IOException;
+023import java.security.PrivilegedExceptionAction;
+024import java.util.Map;
+025
+026import javax.security.auth.callback.Callback;
+027import javax.security.auth.callback.CallbackHandler;
+028import javax.security.auth.callback.NameCallback;
+029import javax.security.auth.callback.PasswordCallback;
+030import javax.security.auth.callback.UnsupportedCallbackException;
+031import javax.security.sasl.AuthorizeCallback;
+032import javax.security.sasl.RealmCallback;
+033import javax.security.sasl.Sasl;
+034import javax.security.sasl.SaslException;
+035import javax.security.sasl.SaslServer;
+036
+037import org.apache.commons.logging.Log;
+038import org.apache.commons.logging.LogFactory;
 039import org.apache.hadoop.hbase.classification.InterfaceAudience;
-040import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-041import org.apache.hadoop.security.UserGroupInformation;
-042import org.apache.hadoop.security.token.SecretManager;
-043import org.apache.hadoop.security.token.SecretManager.InvalidToken;
-044import org.apache.hadoop.security.token.TokenIdentifier;
-045
-046/**
-047 * A utility class for dealing with SASL on RPC server
+040import org.apache.hadoop.security.UserGroupInformation;
+041import org.apache.hadoop.security.token.SecretManager;
+042import org.apache.hadoop.security.token.SecretManager.InvalidToken;
+043import org.apache.hadoop.security.token.TokenIdentifier;
+044
+045/**
+046 * A utility class that encapsulates SASL logic for RPC server. Copied from
+047 * <code>org.apache.hadoop.security</code>
 048 */
 049@InterfaceAudience.Private
 050public class HBaseSaslRpcServer {
-051  private static final Log LOG = LogFactory.getLog(HBaseSaslRpcServer.class);
-052
-053  private static Map<String, String> saslProps = null;
-054
-055  public static void init(Configuration conf) {
-056    saslProps = SaslUtil.initSaslProperties(conf.get("hbase.rpc.protection",
-057      QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)));
-058  }
-059
-060  public static Map<String, String> getSaslProps() {
-061    return saslProps;
-062  }
-063
-064  public static <T extends TokenIdentifier> T getIdentifier(String id,
-065      SecretManager<T> secretManager) throws InvalidToken {
-066    byte[] tokenId = SaslUtil.decodeIdentifier(id);
-067    T tokenIdentifier = secretManager.createIdentifier();
-068    try {
-069      tokenIdentifier.readFields(new DataInputStream(new ByteArrayInputStream(
-070          tokenId)));
-071    } catch (IOException e) {
-072      throw (InvalidToken) new InvalidToken(
-073          "Can't de-serialize tokenIdentifier").initCause(e);
-074    }
-075    return tokenIdentifier;
-076  }
-077
-078
-079  /** CallbackHandler for SASL DIGEST-MD5 mechanism */
-080  public static class SaslDigestCallbackHandler implements CallbackHandler {
-081    private SecretManager<TokenIdentifier> secretManager;
-082    private
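Although the removed block above is cut off mid-field, the old getIdentifier helper is fully visible: it base64-decodes the SASL-supplied id and rehydrates a TokenIdentifier through Writable-style readFields. Below is a small, self-contained sketch of that decode-then-readFields pattern using only the JDK; FakeTokenIdentifier is a stand-in for Hadoop's TokenIdentifier/SecretManager machinery, not part of HBase.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.Base64;

    // Stand-in for a Writable-style token identifier; Hadoop's TokenIdentifier is not used here.
    class FakeTokenIdentifier {
      String user;

      void write(DataOutputStream out) throws IOException {
        out.writeUTF(user);
      }

      void readFields(DataInputStream in) throws IOException {
        user = in.readUTF();
      }
    }

    public class SaslIdentifierSketch {
      // Mirrors the shape of the old getIdentifier(): decode the id, then readFields() it.
      static FakeTokenIdentifier getIdentifier(String encodedId) throws IOException {
        // SaslUtil.decodeIdentifier plays this base64-decoding role in the real class.
        byte[] tokenId = Base64.getDecoder().decode(encodedId);
        FakeTokenIdentifier identifier = new FakeTokenIdentifier();
        identifier.readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
        return identifier;
      }

      public static void main(String[] args) throws IOException {
        // Round-trip: serialize an identifier, base64-encode it, then decode it back.
        FakeTokenIdentifier original = new FakeTokenIdentifier();
        original.user = "alice";
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));
        String encoded = Base64.getEncoder().encodeToString(bytes.toByteArray());
        System.out.println(getIdentifier(encoded).user); // prints "alice"
      }
    }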