hbase git commit: HBASE-16792 - reverting to change commit message
Repository: hbase Updated Branches: refs/heads/master 3a0dbf71a -> bda5fd5a1 HBASE-16792 - reverting to change commit message Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bda5fd5a Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bda5fd5a Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bda5fd5a Branch: refs/heads/master Commit: bda5fd5a11f8fa127bd0dfdc10e88cd49d51b201 Parents: 3a0dbf7 Author: RamkrishnaAuthored: Mon Oct 17 12:24:17 2016 +0530 Committer: Ramkrishna Committed: Mon Oct 17 12:25:13 2016 +0530 -- .../main/java/org/apache/hadoop/hbase/KeyValue.java | 15 --- .../hbase/io/encoding/BufferedDataBlockEncoder.java | 4 ++-- .../apache/hadoop/hbase/regionserver/HRegion.java| 4 ++-- 3 files changed, 4 insertions(+), 19 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/bda5fd5a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index f9a621a..8f8554c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -43,7 +43,6 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.RawComparator; - import com.google.common.annotations.VisibleForTesting; /** @@ -2642,20 +2641,6 @@ public class KeyValue implements ExtendedCell { this.rowLen = Bytes.toShort(this.bytes, this.offset); } -public void set(KeyOnlyKeyValue keyOnlyKeyValue) { - this.bytes = keyOnlyKeyValue.bytes; - this.length = keyOnlyKeyValue.length; - this.offset = keyOnlyKeyValue.offset; - this.rowLen = keyOnlyKeyValue.rowLen; -} - -public void clear() { - rowLen = -1; - bytes = null; - offset = 0; - length = 0; -} - @Override public int getKeyOffset() { 
return this.offset; http://git-wip-us.apache.org/repos/asf/hbase/blob/bda5fd5a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java index edecd9a..4d3a26c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java @@ -139,7 +139,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder { protected void invalidate() { valueOffset = -1; tagsCompressedLength = 0; - currentKey.clear(); + currentKey = new KeyValue.KeyOnlyKeyValue(); uncompressTags = true; currentBuffer = null; } @@ -188,7 +188,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder { keyBuffer, nextState.lastCommonPrefix, nextState.keyLength - nextState.lastCommonPrefix); } - currentKey.set(nextState.currentKey); + currentKey = nextState.currentKey; valueOffset = nextState.valueOffset; keyLength = nextState.keyLength; http://git-wip-us.apache.org/repos/asf/hbase/blob/bda5fd5a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index ca92f06..f61eb4b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -356,9 +356,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // We need to ensure that while we are calculating the smallestReadPoint // no new RegionScanners can grab a readPoint that we are unaware of. 
// We achieve this by synchronizing on the scannerReadPoints object. -synchronized (scannerReadPoints) { +synchronized(scannerReadPoints) { minimumReadPoint = mvcc.getReadPoint(); - for (Long readPoint : this.scannerReadPoints.values()) { + for (Long readPoint: this.scannerReadPoints.values()) { if (readPoint <
[24/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/KeyValue.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.html b/devapidocs/org/apache/hadoop/hbase/KeyValue.html index 70e8f69..200da30 100644 --- a/devapidocs/org/apache/hadoop/hbase/KeyValue.html +++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.html @@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public class KeyValue +public class KeyValue extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements ExtendedCell An HBase Key/Value. This is the fundamental HBase Type. @@ -1200,7 +1200,7 @@ implements EMPTY_ARRAY_LIST -private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListTag EMPTY_ARRAY_LIST +private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true; title="class or interface in java.util">ArrayListTag EMPTY_ARRAY_LIST @@ -1209,7 +1209,7 @@ implements LOG -private static finalorg.apache.commons.logging.Log LOG +private static finalorg.apache.commons.logging.Log LOG @@ -1218,7 +1218,7 @@ implements COLUMN_FAMILY_DELIMITER -public static finalchar COLUMN_FAMILY_DELIMITER +public static finalchar COLUMN_FAMILY_DELIMITER Colon character in UTF-8 See Also: @@ -1232,7 +1232,7 @@ implements COLUMN_FAMILY_DELIM_ARRAY -public static finalbyte[] COLUMN_FAMILY_DELIM_ARRAY +public static finalbyte[] COLUMN_FAMILY_DELIM_ARRAY @@ -1242,7 +1242,7 @@ implements COMPARATOR http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -public static finalKeyValue.KVComparator COMPARATOR +public static finalKeyValue.KVComparator COMPARATOR Deprecated.Use CellComparator.COMPARATOR instead Comparator for plain key/values; i.e. 
non-catalog table key/values. Works on Key portion of KeyValue only. @@ -1255,7 +1255,7 @@ public static final META_COMPARATOR http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -public static finalKeyValue.KVComparator META_COMPARATOR +public static finalKeyValue.KVComparator META_COMPARATOR Deprecated.Use CellComparator.META_COMPARATOR instead A KeyValue.KVComparator for hbase:meta catalog table KeyValues. @@ -1268,7 +1268,7 @@ public static final RAW_COMPARATOR http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -public static finalKeyValue.KVComparator RAW_COMPARATOR +public static finalKeyValue.KVComparator RAW_COMPARATOR Deprecated. Needed for Bloom Filters. * @deprecated Use Bytes.BYTES_RAWCOMPARATOR instead @@ -1280,7 +1280,7 @@ public static final KEY_LENGTH_SIZE -public static finalint KEY_LENGTH_SIZE +public static finalint KEY_LENGTH_SIZE Size of the key length field in bytes See Also: @@ -1294,7 +1294,7 @@ public static final TYPE_SIZE -public static finalint TYPE_SIZE +public static finalint TYPE_SIZE Size of the key type field in bytes See Also: @@ -1308,7 +1308,7 @@ public static final ROW_LENGTH_SIZE -public static finalint ROW_LENGTH_SIZE +public static finalint ROW_LENGTH_SIZE Size of the row length field in bytes See Also: @@ -1322,7 +1322,7 @@ public static final FAMILY_LENGTH_SIZE -public static finalint FAMILY_LENGTH_SIZE +public static finalint FAMILY_LENGTH_SIZE Size of the family length field in bytes See Also: @@ -1336,7 +1336,7 @@ public static final TIMESTAMP_SIZE -public static finalint TIMESTAMP_SIZE +public static finalint TIMESTAMP_SIZE Size of the timestamp field in bytes See Also: @@ -1350,7 +1350,7 @@ public static final TIMESTAMP_TYPE_SIZE -public static finalint TIMESTAMP_TYPE_SIZE +public static finalint TIMESTAMP_TYPE_SIZE See Also: Constant Field Values @@ 
-1363,7 +1363,7 @@ public static final KEY_INFRASTRUCTURE_SIZE -public static finalint KEY_INFRASTRUCTURE_SIZE +public static finalint KEY_INFRASTRUCTURE_SIZE See Also: Constant Field Values @@ -1376,7 +1376,7 @@ public static final ROW_OFFSET -public static finalint ROW_OFFSET +public static finalint ROW_OFFSET See Also: Constant Field Values @@ -1389,7 +1389,7 @@ public static final ROW_KEY_OFFSET -public static finalint ROW_KEY_OFFSET +public static finalint ROW_KEY_OFFSET See Also: Constant Field Values @@ -1402,7 +1402,7 @@ public static final KEYVALUE_INFRASTRUCTURE_SIZE -public static finalint KEYVALUE_INFRASTRUCTURE_SIZE +public static finalint KEYVALUE_INFRASTRUCTURE_SIZE See Also: Constant Field Values @@ -1415,7 +1415,7 @@ public static final
[22/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html index c0e8535..97d04ae 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html @@ -173,69 +173,65 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. -org.apache.hadoop.hbase.mob.mapreduce - - - org.apache.hadoop.hbase.regionserver - + org.apache.hadoop.hbase.regionserver.handler - + org.apache.hadoop.hbase.regionserver.querymatcher - + org.apache.hadoop.hbase.regionserver.wal - + org.apache.hadoop.hbase.replication Multi Cluster Replication - + org.apache.hadoop.hbase.replication.regionserver - + org.apache.hadoop.hbase.rest HBase REST - + org.apache.hadoop.hbase.rest.model - + org.apache.hadoop.hbase.security.access - + org.apache.hadoop.hbase.security.visibility - + org.apache.hadoop.hbase.thrift Provides an HBase http://incubator.apache.org/thrift/;>Thrift service. - + org.apache.hadoop.hbase.util - + org.apache.hadoop.hbase.util.test - + org.apache.hadoop.hbase.wal @@ -954,9 +950,10 @@ service. static KeyValue -KeyValueUtil.copyCellTo(Cellcell, +KeyValueUtil.copyCellTo(Cellcell, byte[]buf, - intoffset) + intoffset, + intlen) Write the given cell in KeyValue serialization format into the given buf and return a new KeyValue object around that. @@ -1440,7 +1437,7 @@ service. intrvlength) -static void +static int KeyValueUtil.oswrite(Cellcell, http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true; title="class or interface in java.io">OutputStreamout, booleanwithTags) @@ -4217,26 +4214,6 @@ service. 
- - - -Uses of Cell in org.apache.hadoop.hbase.mob.mapreduce - -Methods in org.apache.hadoop.hbase.mob.mapreduce with parameters of type Cell - -Modifier and Type -Method and Description - - - -void -MemStoreWrapper.addToMemstore(Cellcell) -Adds a Cell into the memstore. - - - - - @@ -4979,81 +4956,77 @@ service. long -Store.add(Cellcell) -Adds a value to the memstore - - - -long AbstractMemStore.add(Cellcell) Write an update - + long MemStore.add(Cellcell) Write an update - + boolean CellSet.add(Celle) - + boolean CellFlatMap.CellFlatMapCollection.add(Cellk) - + long -HStore.add(Cellcell) +HStore.add(Cellcell) +Adds a value to the memstore + - + long MutableSegment.add(Cellcell, booleanmslabUsed) Adds the given cell into the segment - + void CellSink.append(Cellcell) Append the given cell - + void StripeMultiFileWriter.BoundaryMultiWriter.append(Cellcell) - + void StripeMultiFileWriter.SizeMultiWriter.append(Cellcell) - + void StoreFileWriter.append(Cellcell) - + void DateTieredMultiFileWriter.append(Cellcell) - + private void StoreFileWriter.appendDeleteFamilyBloomFilter(Cellcell) - + private void StoreFileWriter.appendGeneralBloomfilter(Cellcell) - + private long HRegion.applyToMemstore(Storestore, Cellcell) - + boolean NonReversedNonLazyKeyValueScanner.backwardSeek(Cellkey) - + boolean SegmentScanner.backwardSeek(Cellkey) Seek the scanner at or before the row of specified Cell, it firstly @@ -5063,17 +5036,17 @@ service. previous row of specified KeyValue - + boolean MemStoreScanner.backwardSeek(Cellcell) Set the scanner at the seek key. - + boolean ReversedKeyValueHeap.backwardSeek(CellseekKey) - + boolean KeyValueScanner.backwardSeek(Cellkey) Seek the scanner at or before the row of specified Cell, it firstly @@ -5083,33 +5056,33 @@ service. 
previous row of specified KeyValue - + boolean StoreFileScanner.backwardSeek(Cellkey) - + boolean ReversedStoreScanner.backwardSeek(Cellkey) - + Cell CellSet.ceiling(Celle) - + http://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true; title="class or interface in java.util">Map.EntryCell,Cell CellFlatMap.ceilingEntry(Cellk) - + Cell CellFlatMap.ceilingKey(Cellk) - + private boolean StoreFileReader.checkGeneralBloomFilter(byte[]key, CellkvKey, BloomFilterbloomFilter) - + protected void StoreScanner.checkScanOrder(CellprevKV, Cellkv, @@ -5117,116 +5090,116 @@ service. Check whether scan as expected order - + protected void ReversedStoreScanner.checkScanOrder(CellprevKV, Cellkv, CellComparatorcomparator) - + int Segment.compare(Cellleft, Cellright) - + int KeyValueHeap.KVScannerComparator.compare(Cellleft, Cellright) Compares two KeyValue - + int
[46/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/client/Get.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/Get.html b/apidocs/org/apache/hadoop/hbase/client/Get.html index 5f03b63..72734d4 100644 --- a/apidocs/org/apache/hadoop/hbase/client/Get.html +++ b/apidocs/org/apache/hadoop/hbase/client/Get.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":42,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":42,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10}; +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":42,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":42,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10}; var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -171,7 +171,7 @@ implements Fields inherited from classorg.apache.hadoop.hbase.client.Query -colFamTimeRangeMap, consistency, filter, targetReplicaId +colFamTimeRangeMap, consistency, filter, loadColumnFamiliesOnDemand, targetReplicaId @@ -398,35 +398,42 @@ implements Get +setLoadColumnFamiliesOnDemand(booleanvalue) +Set the value indicating whether loading CFs on demand should be allowed (cluster + default is false). + + + +Get setMaxResultsPerColumnFamily(intlimit) Set the maximum number of values to return per row per Column Family - + Get setMaxVersions() Get all available versions. - + Get setMaxVersions(intmaxVersions) Get up to the specified number of versions of each column. 
- + Get setReplicaId(intId) Specify region replica id where Query will fetch data from. - + Get setRowOffsetPerColumnFamily(intoffset) Set offset for the row per Column Family. - + Get setTimeRange(longminStamp, longmaxStamp) @@ -434,13 +441,13 @@ implements - + Get setTimeStamp(longtimestamp) Get versions of columns with the specified timestamp. - + http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object toMap(intmaxCols) Compile the details beyond the scope of getFingerprint (row, columns, @@ -453,7 +460,7 @@ implements Methods inherited from classorg.apache.hadoop.hbase.client.Query -getACL, getAuthorizations, getColumnFamilyTimeRange, getConsistency, getFilter, getIsolationLevel, getReplicaId +doLoadColumnFamiliesOnDemand, getACL, getAuthorizations, getColumnFamilyTimeRange, getConsistency, getFilter, getIsolationLevel, getLoadColumnFamiliesOnDemandValue, get ReplicaId @@ -535,7 +542,7 @@ implements isCheckExistenceOnly -publicbooleanisCheckExistenceOnly() +publicbooleanisCheckExistenceOnly() @@ -544,7 +551,7 @@ implements setCheckExistenceOnly -publicGetsetCheckExistenceOnly(booleancheckExistenceOnly) +publicGetsetCheckExistenceOnly(booleancheckExistenceOnly) @@ -554,7 +561,7 @@ implements isClosestRowBefore http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -publicbooleanisClosestRowBefore() +publicbooleanisClosestRowBefore() Deprecated.since 2.0.0 and will be removed in 3.0.0 This will always return the default value which is false as client cannot set the value to this property any more. 
@@ -567,7 +574,7 @@ publicboolean setClosestRowBefore http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -publicGetsetClosestRowBefore(booleanclosestRowBefore) +publicGetsetClosestRowBefore(booleanclosestRowBefore) Deprecated.since 2.0.0 and will be removed in 3.0.0 This is not used any more and does nothing. Use reverse scan instead. @@ -578,7 +585,7 @@ public addFamily -publicGetaddFamily(byte[]family) +publicGetaddFamily(byte[]family) Get all columns from the specified family. Overrides previous calls to addColumn for this family. @@ -596,7 +603,7 @@ public addColumn -publicGetaddColumn(byte[]family, +publicGetaddColumn(byte[]family, byte[]qualifier) Get the column from the specific family with the
[28/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/deprecated-list.html -- diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html index 13f153e..9027794 100644 --- a/devapidocs/deprecated-list.html +++ b/devapidocs/deprecated-list.html @@ -600,185 +600,188 @@ org.apache.hadoop.hbase.http.HttpServer.getPort() +org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv.getProcedureQueue() + + org.apache.hadoop.hbase.CellUtil.getQualifierBufferShallowCopy(Cell) As of release 2.0.0, this will be removed in HBase 3.0.0. - + org.apache.hadoop.hbase.regionserver.Region.getReadpoint(IsolationLevel) Since 1.2.0. Use Region.getReadPoint(IsolationLevel) instead. - + org.apache.hadoop.hbase.MetaTableAccessor.getRegion(Connection, byte[]) use MetaTableAccessor.getRegionLocation(Connection, byte[]) instead - + org.apache.hadoop.hbase.regionserver.HRegion.getRegionDir(Path, HRegionInfo) For tests only; to be removed. - + org.apache.hadoop.hbase.regionserver.HRegion.getRegionDir(Path, String) For tests only; to be removed. - + org.apache.hadoop.hbase.TableName.getRowComparator() The comparator is an internal property of the table. Should not have been exposed here - + org.apache.hadoop.hbase.client.Table.getRpcTimeout() Use getReadRpcTimeout or getWriteRpcTimeout instead - + org.apache.hadoop.hbase.client.HTable.getRpcTimeout() - + org.apache.hadoop.hbase.client.HTableWrapper.getRpcTimeout() - + org.apache.hadoop.hbase.rest.client.RemoteHTable.getRpcTimeout() - + org.apache.hadoop.hbase.MetaTableAccessor.getScanForTableName(Connection, TableName) - + org.apache.hadoop.hbase.regionserver.StoreFileReader.getScanner(boolean, boolean) Do not write further code which depends on this call. Instead use getStoreFileScanner() which uses the StoreFileScanner class/interface which is the preferred way to scan a store with higher level concepts. 
- + org.apache.hadoop.hbase.regionserver.StoreFileReader.getScanner(boolean, boolean, boolean) Do not write further code which depends on this call. Instead use getStoreFileScanner() which uses the StoreFileScanner class/interface which is the preferred way to scan a store with higher level concepts. - + org.apache.hadoop.hbase.KeyValue.KVComparator.getShortMidpointKey(byte[], byte[]) Since 0.99.2; - + org.apache.hadoop.hbase.io.ImmutableBytesWritable.getSize() use ImmutableBytesWritable.getLength() instead - + org.apache.hadoop.hbase.util.Bytes.getSize() use Bytes.getLength() instead - + org.apache.hadoop.hbase.regionserver.HStore.getStoreHomedir(Path, HRegionInfo, byte[]) - + org.apache.hadoop.hbase.regionserver.HStore.getStoreHomedir(Path, String, byte[]) - + org.apache.hadoop.hbase.HTableDescriptor.getTableDir(Path, byte[]) - + org.apache.hadoop.hbase.client.HTableInterface.getTableName() Use Table.getName() instead - + org.apache.hadoop.hbase.util.ZKDataMigrator.getTableState(ZooKeeperWatcher, TableName) - + org.apache.hadoop.hbase.KeyValue.getType() - + org.apache.hadoop.hbase.io.hfile.HFileScanner.getValueString() Since hbase-2.0.0 - + org.apache.hadoop.hbase.client.Table.getWriteBufferSize() as of 1.0.1 (should not have been in 1.0.0). Replaced by BufferedMutator.getWriteBufferSize() - + org.apache.hadoop.hbase.client.HTableInterface.getWriteBufferSize() as of 1.0.0. Replaced by BufferedMutator.getWriteBufferSize() - + org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.initCredentialsForCluster(Job, String) Since 1.2.0, use TableMapReduceUtil.initCredentialsForCluster(Job, Configuration) instead. - + org.apache.hadoop.hbase.regionserver.HRegion.initialize() use HRegion.createHRegion() or HRegion.openHRegion() - + org.apache.hadoop.hbase.client.HTableInterface.isAutoFlush() as of 1.0.0. 
Replaced by BufferedMutator - + org.apache.hadoop.hbase.client.Get.isClosestRowBefore() since 2.0.0 and will be removed in 3.0.0 - + org.apache.hadoop.hbase.KeyValue.iscreate(InputStream) - + org.apache.hadoop.hbase.client.ClusterConnection.isDeadServer(ServerName) internal method, do not use thru ClusterConnection - + org.apache.hadoop.hbase.KeyValue.isDelete() - + org.apache.hadoop.hbase.master.cleaner.BaseLogCleanerDelegate.isLogDeletable(FileStatus) - + org.apache.hadoop.hbase.client.ClusterConnection.isMasterRunning() this has been deprecated without a replacement - + org.apache.hadoop.hbase.client.ConnectionImplementation.isMasterRunning() this has been deprecated without a replacement - + org.apache.hadoop.hbase.security.visibility.VisibilityClient.listLabels(Configuration, String) Use VisibilityClient.listLabels(Connection,String) instead. - + org.apache.hadoop.hbase.CellUtil.matchingRow(Cell, Cell) As of release 2.0.0, this will be
[32/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/book.html -- diff --git a/book.html b/book.html index 876f41c..2810c41 100644 --- a/book.html +++ b/book.html @@ -4,11 +4,11 @@ - + Apache HBase Reference Guide -https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.4.0/css/font-awesome.min.css;> +https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.2.0/css/font-awesome.min.css;> @@ -93,190 +93,191 @@ 42. Constraints 43. Schema Design Case Studies 44. Operational and Performance Configuration Options +45. Special Cases HBase and MapReduce -45. HBase, MapReduce, and the CLASSPATH -46. MapReduce Scan Caching -47. Bundled HBase MapReduce Jobs -48. HBase as a MapReduce Job Data Source and Data Sink -49. Writing HFiles Directly During Bulk Import -50. RowCounter Example -51. Map-Task Splitting -52. HBase MapReduce Examples -53. Accessing Other HBase Tables in a MapReduce Job -54. Speculative Execution -55. Cascading +46. HBase, MapReduce, and the CLASSPATH +47. MapReduce Scan Caching +48. Bundled HBase MapReduce Jobs +49. HBase as a MapReduce Job Data Source and Data Sink +50. Writing HFiles Directly During Bulk Import +51. RowCounter Example +52. Map-Task Splitting +53. HBase MapReduce Examples +54. Accessing Other HBase Tables in a MapReduce Job +55. Speculative Execution +56. Cascading Securing Apache HBase -56. Using Secure HTTP (HTTPS) for the Web UI -57. Using SPNEGO for Kerberos authentication with Web UIs -58. Secure Client Access to Apache HBase -59. Simple User Access to Apache HBase -60. Securing Access to HDFS and ZooKeeper -61. Securing Access To Your Data -62. Security Configuration Example +57. Using Secure HTTP (HTTPS) for the Web UI +58. Using SPNEGO for Kerberos authentication with Web UIs +59. Secure Client Access to Apache HBase +60. Simple User Access to Apache HBase +61. Securing Access to HDFS and ZooKeeper +62. Securing Access To Your Data +63. Security Configuration Example Architecture -63. Overview -64. Catalog Tables -65. 
Client -66. Client Request Filters -67. Master -68. RegionServer -69. Regions -70. Bulk Loading -71. HDFS -72. Timeline-consistent High Available Reads -73. Storing Medium-sized Objects (MOB) +64. Overview +65. Catalog Tables +66. Client +67. Client Request Filters +68. Master +69. RegionServer +70. Regions +71. Bulk Loading +72. HDFS +73. Timeline-consistent High Available Reads +74. Storing Medium-sized Objects (MOB) Apache HBase APIs -74. Examples +75. Examples Apache HBase External APIs -75. REST -76. Thrift -77. C/C++ Apache HBase Client -78. Using Java Data Objects (JDO) with HBase -79. Scala -80. Jython +76. REST +77. Thrift +78. C/C++ Apache HBase Client +79. Using Java Data Objects (JDO) with HBase +80. Scala +81. Jython Thrift API and Filter Language -81. Filter Language +82. Filter Language HBase and Spark -82. Basic Spark -83. Spark Streaming -84. Bulk Load -85. SparkSQL/DataFrames +83. Basic Spark +84. Spark Streaming +85. Bulk Load +86. SparkSQL/DataFrames Apache HBase Coprocessors -86. Coprocessor Overview -87. Types of Coprocessors -88. Loading Coprocessors -89. Examples -90. Guidelines For Deploying A Coprocessor -91. Monitor Time Spent in Coprocessors +87. Coprocessor Overview +88. Types of Coprocessors +89. Loading Coprocessors +90. Examples +91. Guidelines For Deploying A Coprocessor +92. Monitor Time Spent in Coprocessors Apache HBase Performance Tuning -92. Operating System -93. Network -94. Java -95. HBase Configurations -96. ZooKeeper -97. Schema Design -98. HBase General Patterns -99. Writing to HBase -100. Reading from HBase -101. Deleting from HBase -102. HDFS -103. Amazon EC2 -104. Collocating HBase and MapReduce -105. Case Studies +93. Operating System +94. Network +95. Java +96. HBase Configurations +97. ZooKeeper +98. Schema Design +99. HBase General Patterns +100. Writing to HBase +101. Reading from HBase +102. Deleting from HBase +103. HDFS +104. Amazon EC2 +105. Collocating HBase and MapReduce +106. 
Case Studies Troubleshooting and Debugging Apache HBase -106. General Guidelines -107. Logs -108. Resources -109. Tools -110. Client -111. MapReduce -112. NameNode -113. Network -114. RegionServer -115. Master -116. ZooKeeper -117. Amazon EC2 -118. HBase and Hadoop version issues -119. IPC Configuration Conflicts with Hadoop -120. HBase and HDFS -121. Running unit or integration tests -122. Case Studies -123. Cryptographic Features -124. Operating System Specific Issues -125. JDK Issues +107. General Guidelines +108. Logs +109. Resources +110. Tools +111. Client +112. MapReduce +113. NameNode +114. Network +115. RegionServer +116. Master +117. ZooKeeper +118. Amazon EC2 +119. HBase and Hadoop version issues +120. IPC Configuration Conflicts with Hadoop +121. HBase and HDFS +122. Running unit or integration tests +123. Case Studies
[08/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.ExceptionListener.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.ExceptionListener.html b/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.ExceptionListener.html index 7deca6c..1acadd1 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.ExceptionListener.html +++ b/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.ExceptionListener.html @@ -107,7 +107,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public @InterfaceStability.Evolving -public static interface BufferedMutator.ExceptionListener +public static interface BufferedMutator.ExceptionListener Listens for asynchronous exceptions on a BufferedMutator. @@ -153,7 +153,7 @@ public static interface onException -voidonException(RetriesExhaustedWithDetailsExceptionexception, +voidonException(RetriesExhaustedWithDetailsExceptionexception, BufferedMutatormutator) throws RetriesExhaustedWithDetailsException http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.html b/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.html index 99260f2..8347580 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.html +++ b/devapidocs/org/apache/hadoop/hbase/client/BufferedMutator.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6}; +var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6}; var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -230,6 +230,18 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html Sends a Mutation to the table. 
+ +void +setOperationTimeout(inttimeout) +Set operation timeout for this mutator instance + + + +void +setRpcTimeout(inttimeout) +Set rpc timeout for this mutator instance + + @@ -343,7 +355,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html - + getWriteBufferSize longgetWriteBufferSize() @@ -356,6 +368,26 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html + + + + + +setRpcTimeout +voidsetRpcTimeout(inttimeout) +Set rpc timeout for this mutator instance + + + + + + + +setOperationTimeout +voidsetOperationTimeout(inttimeout) +Set operation timeout for this mutator instance + + http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/BufferedMutatorImpl.QueueRowAccess.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/BufferedMutatorImpl.QueueRowAccess.html b/devapidocs/org/apache/hadoop/hbase/client/BufferedMutatorImpl.QueueRowAccess.html index 7ecf3ac..368253f 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/BufferedMutatorImpl.QueueRowAccess.html +++ b/devapidocs/org/apache/hadoop/hbase/client/BufferedMutatorImpl.QueueRowAccess.html @@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab"; -private class BufferedMutatorImpl.QueueRowAccess +private class BufferedMutatorImpl.QueueRowAccess extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements RowAccessRow @@ -231,7 +231,7 @@ implements remainder -privateint remainder +privateint remainder @@ -248,7 +248,7 @@ implements QueueRowAccess -privateQueueRowAccess() +privateQueueRowAccess() @@ -265,7 +265,7 @@ implements reset -voidreset() +voidreset() @@ -274,7 +274,7 @@ implements iterator -publichttp://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true; title="class or interface in java.util">IteratorRowiterator() 
+publichttp://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true; title="class or interface in java.util">IteratorRowiterator() Specified by: http://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true#iterator--; title="class or interface in java.lang">iteratorin interfacehttp://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true; title="class or interface in java.lang">IterableRow @@ -287,7 +287,7 @@ implements size -publicintsize() +publicintsize() Specified by: sizein interfaceRowAccessRow @@ -302,7 +302,7 @@ implements restoreRemainder -voidrestoreRemainder() +voidrestoreRemainder() @@ -311,7 +311,7 @@ implements isEmpty
[18/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html index fcd1bb3..2cf6bde 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html @@ -142,7 +142,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. -void +http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String LoadIncrementalHFiles.doBulkLoad(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listorg.apache.hadoop.fs.Pathmap, Adminadmin, Tabletable, http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html index 0afbdd7..89a1f85 100644 --- a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html +++ b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html @@ -306,164 +306,160 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
-org.apache.hadoop.hbase.mob.mapreduce - - - org.apache.hadoop.hbase.monitoring - + org.apache.hadoop.hbase.namespace - + org.apache.hadoop.hbase.nio - + org.apache.hadoop.hbase.procedure - + org.apache.hadoop.hbase.procedure.flush - + org.apache.hadoop.hbase.procedure2 - + org.apache.hadoop.hbase.procedure2.store - + org.apache.hadoop.hbase.procedure2.store.wal - + org.apache.hadoop.hbase.procedure2.util - + org.apache.hadoop.hbase.quotas - + org.apache.hadoop.hbase.regionserver - + org.apache.hadoop.hbase.regionserver.compactions - + org.apache.hadoop.hbase.regionserver.handler - + org.apache.hadoop.hbase.regionserver.querymatcher - + org.apache.hadoop.hbase.regionserver.snapshot - + org.apache.hadoop.hbase.regionserver.throttle - + org.apache.hadoop.hbase.regionserver.wal - + org.apache.hadoop.hbase.replication Multi Cluster Replication - + org.apache.hadoop.hbase.replication.master - + org.apache.hadoop.hbase.replication.regionserver - + org.apache.hadoop.hbase.rest HBase REST - + org.apache.hadoop.hbase.rest.filter - + org.apache.hadoop.hbase.rest.model - + org.apache.hadoop.hbase.rest.provider - + org.apache.hadoop.hbase.rest.provider.consumer - + org.apache.hadoop.hbase.rest.provider.producer - + org.apache.hadoop.hbase.rsgroup - + org.apache.hadoop.hbase.security - + org.apache.hadoop.hbase.security.access - + org.apache.hadoop.hbase.security.token - + org.apache.hadoop.hbase.security.visibility - + org.apache.hadoop.hbase.security.visibility.expression - + org.apache.hadoop.hbase.snapshot - + org.apache.hadoop.hbase.thrift Provides an HBase http://incubator.apache.org/thrift/;>Thrift service. - + org.apache.hadoop.hbase.thrift2 Provides an HBase http://thrift.apache.org/;>Thrift service. - + org.apache.hadoop.hbase.trace - + org.apache.hadoop.hbase.types @@ -471,51 +467,51 @@ service. extensible data type API. 
- + org.apache.hadoop.hbase.util - + org.apache.hadoop.hbase.util.byterange - + org.apache.hadoop.hbase.util.byterange.impl - + org.apache.hadoop.hbase.util.hbck - + org.apache.hadoop.hbase.util.test - + org.apache.hadoop.hbase.util.vint - + org.apache.hadoop.hbase.wal - + org.apache.hadoop.hbase.zookeeper - + org.apache.hadoop.hbase.zookeeper.lock - + org.apache.hadoop.metrics2.impl - + org.apache.hadoop.metrics2.lib - + org.apache.hadoop.metrics2.util @@ -1181,23 +1177,65 @@ service. (package private) class +AsyncConnectionConfiguration +Timeout configs. + + + +(package private) class +AsyncConnectionImpl +The implementation of AsyncConnection. + + + +(package private) class AsyncProcess This class allows a continuous flow of requests. +(package private) class +AsyncRegionLocator +TODO: reimplement using aync
[01/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
Repository: hbase-site Updated Branches: refs/heads/asf-site 344fa3264 -> 9d13f2d98 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/Scan.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/Scan.html b/devapidocs/org/apache/hadoop/hbase/client/Scan.html index 73c7ec3..64e7f6f 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/Scan.html +++ b/devapidocs/org/apache/hadoop/hbase/client/Scan.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10}; +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10}; var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -231,36 +231,32 @@ extends -private http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true; title="class or interface in java.lang">Boolean -loadColumnFamiliesOnDemand - - private 
static org.apache.commons.logging.Log LOG - + private long maxResultSize - + private int maxVersions - + private static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String RAW_ATTR - + private boolean reversed - + static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String SCAN_ATTRIBUTES_METRICS_DATA Deprecated. - + static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String SCAN_ATTRIBUTES_METRICS_ENABLE Deprecated. @@ -268,11 +264,11 @@ extends - + static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String SCAN_ATTRIBUTES_TABLE_NAME - + private boolean small Set it true for small scan to get better performance @@ -289,23 +285,23 @@ extends - + private byte[] startRow - + private byte[] stopRow - + private int storeLimit - + private int storeOffset - + private TimeRange tr @@ -315,7 +311,7 @@ extends Fields inherited from classorg.apache.hadoop.hbase.client.Query -colFamTimeRangeMap, consistency, filter, targetReplicaId +colFamTimeRangeMap, consistency, filter, loadColumnFamiliesOnDemand, targetReplicaId @@ -404,185 +400,173 @@ extends boolean -doLoadColumnFamiliesOnDemand() -Get the logical value indicating whether on-demand CF loading should be allowed. - - - -boolean getAllowPartialResults() - + int getBatch() - + boolean getCacheBlocks() Get whether blocks should be cached for this Scan. 
- + int getCaching() - + byte[][] getFamilies() - + http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true; title="class or interface in java.util">NavigableSetbyte[] getFamilyMap() Getting the familyMap - + Filter getFilter() - + http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object getFingerprint() Compile the table and column family (i.e. - -http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true; title="class or interface in java.lang">Boolean -getLoadColumnFamiliesOnDemandValue() -Get the raw loadColumnFamiliesOnDemand setting; if it's not set, can be null. - - - + long getMaxResultSize() - + int
hbase git commit: HBASE-16145 MultiRowRangeFilter constructor shouldn't throw IOException (Konstantin Ryakhovskiy)
Repository: hbase Updated Branches: refs/heads/master 0c304a049 -> 10840a51e HBASE-16145 MultiRowRangeFilter constructor shouldn't throw IOException (Konstantin Ryakhovskiy) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/10840a51 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/10840a51 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/10840a51 Branch: refs/heads/master Commit: 10840a51e851d3ea50749a9527310142a967dab3 Parents: 0c304a0 Author: tedyuAuthored: Mon Oct 17 16:06:10 2016 -0700 Committer: tedyu Committed: Mon Oct 17 16:06:10 2016 -0700 -- .../apache/hadoop/hbase/filter/MultiRowRangeFilter.java | 12 ++-- .../apache/hadoop/hbase/rest/model/ScannerModel.java| 6 +- .../org/apache/hadoop/hbase/mapreduce/RowCounter.java | 7 +-- 3 files changed, 4 insertions(+), 21 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/10840a51/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java index 328498d..66556b0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hbase.filter; -import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -64,11 +63,8 @@ public class MultiRowRangeFilter extends FilterBase { /** * @param list A list of RowRange - * @throws java.io.IOException - * throw an exception if the range list is not in an natural order or any - * RowRange is invalid */ - public MultiRowRangeFilter(List list) throws IOException { + public MultiRowRangeFilter(List list) { this.rangeList = sortAndMerge(list); } @@ -184,11 +180,7 @@ public 
class MultiRowRangeFilter extends FilterBase { rangeProto.getStopRow().toByteArray() : null, rangeProto.getStopRowInclusive()); rangeList.add(range); } -try { - return new MultiRowRangeFilter(rangeList); -} catch (IOException e) { - throw new DeserializationException("Fail to instantiate the MultiRowRangeFilter", e); -} +return new MultiRowRangeFilter(rangeList); } /** http://git-wip-us.apache.org/repos/asf/hbase/blob/10840a51/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java index a1d852e..024fec8 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java @@ -407,11 +407,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { filter = new MultipleColumnPrefixFilter(values); } break; case MultiRowRangeFilter: { -try { - filter = new MultiRowRangeFilter(ranges); -} catch (IOException e) { - throw new RuntimeException(e); -} +filter = new MultiRowRangeFilter(ranges); } break; case PageFilter: filter = new PageFilter(Long.parseLong(value)); http://git-wip-us.apache.org/repos/asf/hbase/blob/10840a51/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java index aca84fd..47651af 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java @@ -207,12 +207,7 @@ public class RowCounter extends Configured implements Tool { scan.setStartRow(range.getStartRow()); //inclusive scan.setStopRow(range.getStopRow()); //exclusive } else if (size > 
1) { - try { -scan.setFilter(new MultiRowRangeFilter(rowRangeList)); - } catch (IOException e) { -//the IOException should never be thrown. see HBASE-16145 -throw new RuntimeException("Cannot instantiate MultiRowRangeFilter"); - } + scan.setFilter(new
[3/3] hbase git commit: HBASE-16653 Backport HBASE-11393 to branches which support namespace
HBASE-16653 Backport HBASE-11393 to branches which support namespace Signed-off-by: chenhengProject: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/66941910 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/66941910 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/66941910 Branch: refs/heads/branch-1 Commit: 66941910bd07462fe496c5bbb591f4071f77b8fb Parents: 6df7554 Author: Guanghao Zhang Authored: Mon Sep 26 19:33:43 2016 +0800 Committer: chenheng Committed: Tue Oct 18 09:12:47 2016 +0800 -- .../client/replication/ReplicationAdmin.java| 84 +- .../replication/ReplicationPeerConfig.java | 16 +- .../replication/ReplicationPeerZKImpl.java | 80 +- .../hbase/replication/ReplicationPeers.java | 15 +- .../replication/ReplicationPeersZKImpl.java | 60 +- .../replication/ReplicationSerDeHelper.java | 189 +++ .../replication/ReplicationStateZKBase.java | 17 + .../protobuf/generated/ZooKeeperProtos.java | 1155 +- .../src/main/protobuf/ZooKeeper.proto |8 +- .../org/apache/hadoop/hbase/master/HMaster.java |8 + .../replication/master/TableCFsUpdater.java | 120 ++ .../hbase/client/TestReplicaWithCluster.java|5 +- .../replication/TestReplicationAdmin.java | 193 +-- .../cleaner/TestReplicationHFileCleaner.java|2 +- .../replication/TestMasterReplication.java |9 +- .../replication/TestMultiSlaveReplication.java |8 +- .../replication/TestPerTableCFReplication.java | 153 ++- .../hbase/replication/TestReplicationBase.java |4 +- .../replication/TestReplicationSmallTests.java |4 +- .../replication/TestReplicationStateBasic.java | 20 +- .../replication/TestReplicationSyncUpTool.java |4 +- .../TestReplicationTrackerZKImpl.java | 10 +- .../replication/TestReplicationWithTags.java|4 +- .../replication/master/TestTableCFsUpdater.java | 164 +++ .../TestReplicationSourceManager.java |2 +- ...sibilityLabelReplicationWithExpAsString.java |5 +- .../TestVisibilityLabelsReplication.java|5 +- 
.../apache/hadoop/hbase/util/TestHBaseFsck.java |5 +- .../src/main/ruby/hbase/replication_admin.rb| 49 +- .../src/main/ruby/shell/commands/add_peer.rb|4 +- .../ruby/shell/commands/append_peer_tableCFs.rb |2 +- .../src/main/ruby/shell/commands/list_peers.rb |6 +- .../ruby/shell/commands/remove_peer_tableCFs.rb |4 +- .../ruby/shell/commands/set_peer_tableCFs.rb|4 +- .../hbase/client/TestReplicationShell.java |2 +- .../test/ruby/hbase/replication_admin_test.rb | 118 +- 36 files changed, 2167 insertions(+), 371 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/66941910/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java index 1304396..9fca28b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java @@ -55,6 +55,7 @@ import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.hbase.replication.ReplicationPeerZKImpl; import org.apache.hadoop.hbase.replication.ReplicationPeers; import org.apache.hadoop.hbase.replication.ReplicationQueuesClient; +import org.apache.hadoop.hbase.replication.ReplicationSerDeHelper; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; @@ -184,8 +185,8 @@ public class ReplicationAdmin implements Closeable { @Deprecated public void addPeer(String id, String clusterKey, String tableCFs) throws ReplicationException { -this.replicationPeers.addPeer(id, - new ReplicationPeerConfig().setClusterKey(clusterKey), tableCFs); +this.addPeer(id, new ReplicationPeerConfig().setClusterKey(clusterKey), + parseTableCFsFromConfig(tableCFs)); } /** @@ -199,7 +200,19 @@ public class 
ReplicationAdmin implements Closeable { */ public void addPeer(String id, ReplicationPeerConfig peerConfig, Map tableCfs) throws ReplicationException { -this.replicationPeers.addPeer(id, peerConfig, getTableCfsStr(tableCfs)); +if (tableCfs != null) { + peerConfig.setTableCFsMap(tableCfs); +} +this.replicationPeers.addPeer(id, peerConfig); + } + + /** + * Add
[1/3] hbase git commit: HBASE-16653 Backport HBASE-11393 to branches which support namespace
Repository: hbase Updated Branches: refs/heads/branch-1 6df7554d2 -> 66941910b http://git-wip-us.apache.org/repos/asf/hbase/blob/66941910/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java index 2c9fc0f..dd15e4c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java @@ -22,10 +22,14 @@ package org.apache.hadoop.hbase.replication; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -37,6 +41,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; @@ -183,13 +188,13 @@ public class TestPerTableCFReplication { MaptabCFsMap = null; // 1. 
null or empty string, result should be null -tabCFsMap = ReplicationAdmin.parseTableCFsFromConfig(null); +tabCFsMap = ReplicationSerDeHelper.parseTableCFsFromConfig(null); assertEquals(null, tabCFsMap); -tabCFsMap = ReplicationAdmin.parseTableCFsFromConfig(""); +tabCFsMap = ReplicationSerDeHelper.parseTableCFsFromConfig(""); assertEquals(null, tabCFsMap); -tabCFsMap = ReplicationAdmin.parseTableCFsFromConfig(" "); +tabCFsMap = ReplicationSerDeHelper.parseTableCFsFromConfig(" "); assertEquals(null, tabCFsMap); TableName tab1 = TableName.valueOf("tab1"); @@ -197,20 +202,20 @@ public class TestPerTableCFReplication { TableName tab3 = TableName.valueOf("tab3"); // 2. single table: "tab1" / "tab2:cf1" / "tab3:cf1,cf3" -tabCFsMap = ReplicationAdmin.parseTableCFsFromConfig("tab1"); +tabCFsMap = ReplicationSerDeHelper.parseTableCFsFromConfig("tab1"); assertEquals(1, tabCFsMap.size()); // only one table assertTrue(tabCFsMap.containsKey(tab1)); // its table name is "tab1" assertFalse(tabCFsMap.containsKey(tab2)); // not other table assertEquals(null, tabCFsMap.get(tab1)); // null cf-list, -tabCFsMap = ReplicationAdmin.parseTableCFsFromConfig("tab2:cf1"); +tabCFsMap = ReplicationSerDeHelper.parseTableCFsFromConfig("tab2:cf1"); assertEquals(1, tabCFsMap.size()); // only one table assertTrue(tabCFsMap.containsKey(tab2)); // its table name is "tab2" assertFalse(tabCFsMap.containsKey(tab1)); // not other table assertEquals(1, tabCFsMap.get(tab2).size()); // cf-list contains only 1 cf assertEquals("cf1", tabCFsMap.get(tab2).get(0));// the only cf is "cf1" -tabCFsMap = ReplicationAdmin.parseTableCFsFromConfig("tab3 : cf1 , cf3"); +tabCFsMap = ReplicationSerDeHelper.parseTableCFsFromConfig("tab3 : cf1 , cf3"); assertEquals(1, tabCFsMap.size()); // only one table assertTrue(tabCFsMap.containsKey(tab3)); // its table name is "tab2" assertFalse(tabCFsMap.containsKey(tab1)); // not other table @@ -219,7 +224,7 @@ public class TestPerTableCFReplication { 
assertTrue(tabCFsMap.get(tab3).contains("cf3"));// contains "cf3" // 3. multiple tables: "tab1 ; tab2:cf1 ; tab3:cf1,cf3" -tabCFsMap = ReplicationAdmin.parseTableCFsFromConfig("tab1 ; tab2:cf1 ; tab3:cf1,cf3"); +tabCFsMap = ReplicationSerDeHelper.parseTableCFsFromConfig("tab1 ; tab2:cf1 ; tab3:cf1,cf3"); // 3.1 contains 3 tables : "tab1", "tab2" and "tab3" assertEquals(3, tabCFsMap.size()); assertTrue(tabCFsMap.containsKey(tab1)); @@ -237,7 +242,7 @@ public class TestPerTableCFReplication { // 4. contiguous or additional ";"(table delimiter) or ","(cf delimiter) can be tolerated // still use the example of multiple tables: "tab1 ; tab2:cf1 ; tab3:cf1,cf3" -tabCFsMap = ReplicationAdmin.parseTableCFsFromConfig( +tabCFsMap = ReplicationSerDeHelper.parseTableCFsFromConfig( "tab1 ; ; tab2:cf1 ; tab3:cf1,,cf3 ;"); // 4.1 contains 3 tables : "tab1", "tab2" and "tab3" assertEquals(3, tabCFsMap.size()); @@ -256,7 +261,7 @@ public class TestPerTableCFReplication {
[2/3] hbase git commit: HBASE-16653 Backport HBASE-11393 to branches which support namespace
http://git-wip-us.apache.org/repos/asf/hbase/blob/66941910/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java -- diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java index 09479c4..955995f 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java @@ -5032,6 +5032,719 @@ public final class ZooKeeperProtos { // @@protoc_insertion_point(class_scope:hbase.pb.Table) } + public interface TableCFOrBuilder + extends com.google.protobuf.MessageOrBuilder { + +// optional .hbase.pb.TableName table_name = 1; +/** + * optional .hbase.pb.TableName table_name = 1; + */ +boolean hasTableName(); +/** + * optional .hbase.pb.TableName table_name = 1; + */ +org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); +/** + * optional .hbase.pb.TableName table_name = 1; + */ +org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); + +// repeated bytes families = 2; +/** + * repeated bytes families = 2; + */ +java.util.List getFamiliesList(); +/** + * repeated bytes families = 2; + */ +int getFamiliesCount(); +/** + * repeated bytes families = 2; + */ +com.google.protobuf.ByteString getFamilies(int index); + } + /** + * Protobuf type {@code hbase.pb.TableCF} + */ + public static final class TableCF extends + com.google.protobuf.GeneratedMessage + implements TableCFOrBuilder { +// Use TableCF.newBuilder() to construct. 
+private TableCF(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); +} +private TableCF(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + +private static final TableCF defaultInstance; +public static TableCF getDefaultInstance() { + return defaultInstance; +} + +public TableCF getDefaultInstanceForType() { + return defaultInstance; +} + +private final com.google.protobuf.UnknownFieldSet unknownFields; +@java.lang.Override +public final com.google.protobuf.UnknownFieldSet +getUnknownFields() { + return this.unknownFields; +} +private TableCF( +com.google.protobuf.CodedInputStream input, +com.google.protobuf.ExtensionRegistryLite extensionRegistry) +throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { +boolean done = false; +while (!done) { + int tag = input.readTag(); + switch (tag) { +case 0: + done = true; + break; +default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { +done = true; + } + break; +} +case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; + if (((bitField0_ & 0x0001) == 0x0001)) { +subBuilder = tableName_.toBuilder(); + } + tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); + if (subBuilder != null) { +subBuilder.mergeFrom(tableName_); +tableName_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x0001; + break; +} +case 18: { + if (!((mutable_bitField0_ & 0x0002) == 0x0002)) { +families_ = new java.util.ArrayList(); +mutable_bitField0_ |= 0x0002; + } + families_.add(input.readBytes()); + break; +} + } +} + } catch (com.google.protobuf.InvalidProtocolBufferException e) { +throw 
e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { +throw new com.google.protobuf.InvalidProtocolBufferException( +e.getMessage()).setUnfinishedMessage(this); + } finally { +if (((mutable_bitField0_ & 0x0002) == 0x0002)) { + families_ = java.util.Collections.unmodifiableList(families_); +} +this.unknownFields =
[1/2] hbase git commit: HBASE-16846 Procedure v2 - executor cleanup
Repository: hbase Updated Branches: refs/heads/master c8e9a295c -> c6e9dabe6 http://git-wip-us.apache.org/repos/asf/hbase/blob/c6e9dabe/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java -- diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java new file mode 100644 index 000..1e2db4d --- /dev/null +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java @@ -0,0 +1,87 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.procedure2.util; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.testclassification.MasterTests; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import static org.junit.Assert.assertEquals; + +@Category({MasterTests.class, SmallTests.class}) +public class TestDelayedUtil { + private static final Log LOG = LogFactory.getLog(TestDelayedUtil.class); + + @Test + public void testDelayedContainerEquals() { +Object o1 = new Object(); +Object o2 = new Object(); +ZeroDelayContainer lnull = new ZeroDelayContainer(null); +ZeroDelayContainer l10a = new ZeroDelayContainer(10L); +ZeroDelayContainer l10b = new ZeroDelayContainer(10L); +ZeroDelayContainer l15 = new ZeroDelayContainer(15L); +ZeroDelayContainer onull = new ZeroDelayContainer(null); +ZeroDelayContainer o1ca = new ZeroDelayContainer(o1); +ZeroDelayContainer o1cb = new ZeroDelayContainer(o1); +ZeroDelayContainer o2c = new ZeroDelayContainer(o2); + +ZeroDelayContainer[] items = new ZeroDelayContainer[] { + lnull, l10a, l10b, l15, onull, o1ca, o1cb, o2c, +}; + +assertContainersEquals(lnull, items, lnull, onull); +assertContainersEquals(l10a, items, l10a, l10b); +assertContainersEquals(l10b, items, l10a, l10b); +assertContainersEquals(l15, items, l15); +assertContainersEquals(onull, items, lnull, onull); +assertContainersEquals(o1ca, items, o1ca, o1cb); +assertContainersEquals(o1cb, items, o1ca, o1cb); +assertContainersEquals(o2c, items, o2c); + } + + private void assertContainersEquals(final ZeroDelayContainer src, + final ZeroDelayContainer[] items, final ZeroDelayContainer... 
matches) { +for (int i = 0; i < items.length; ++i) { + boolean shouldMatch = false; + for (int j = 0; j < matches.length; ++j) { +if (items[i] == matches[j]) { + shouldMatch = true; + break; +} + } + boolean isMatching = src.equals(items[i]); + assertEquals(src.getObject() + " unexpectedly match " + items[i].getObject(), +shouldMatch, isMatching); +} + } + + private static class ZeroDelayContainer extends DelayedUtil.DelayedContainer { +public ZeroDelayContainer(final T object) { + super(object); +} + +@Override +public long getTimeoutTimestamp() { + return 0; +} + } +} http://git-wip-us.apache.org/repos/asf/hbase/blob/c6e9dabe/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index af5d03d..a8329e3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -1035,6 +1035,7 @@ public class HMaster extends HRegionServer implements MasterServices { procedureStore.registerListener(new MasterProcedureEnv.MasterProcedureStoreListener(this)); procedureExecutor = new ProcedureExecutor(conf, procEnv, procedureStore, procEnv.getProcedureQueue()); +configurationManager.registerObserver(procEnv); final int numThreads =
hbase git commit: HBASE-16860 BackupCopyService#Type refactoring (Vladimir Rodionov)
Repository: hbase Updated Branches: refs/heads/HBASE-7912 3834f411b -> 909c4efa8 HBASE-16860 BackupCopyService#Type refactoring (Vladimir Rodionov) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/909c4efa Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/909c4efa Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/909c4efa Branch: refs/heads/HBASE-7912 Commit: 909c4efa8ee6aba1004634d099807bca8af4214d Parents: 3834f41 Author: tedyuAuthored: Mon Oct 17 11:04:15 2016 -0700 Committer: tedyu Committed: Mon Oct 17 11:04:15 2016 -0700 -- .../org/apache/hadoop/hbase/backup/BackupCopyService.java | 5 + .../hadoop/hbase/backup/impl/FullTableBackupClient.java | 2 +- .../hbase/backup/impl/IncrementalTableBackupClient.java | 2 +- .../hbase/backup/mapreduce/MapReduceBackupCopyService.java | 9 - 4 files changed, 7 insertions(+), 11 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/909c4efa/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyService.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyService.java index 26e20f1..6c70123 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyService.java @@ -29,9 +29,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Private @InterfaceStability.Evolving public interface BackupCopyService extends Configurable { - static enum Type { -FULL, INCREMENTAL - } /** * Copy backup data @@ -44,7 +41,7 @@ public interface BackupCopyService extends Configurable { * @throws IOException */ public int copy(BackupInfo backupContext, BackupManager backupManager, Configuration conf, - BackupCopyService.Type copyType, String[] options) throws IOException; + BackupType 
copyType, String[] options) throws IOException; /** http://git-wip-us.apache.org/repos/asf/hbase/blob/909c4efa/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java index 9355d07..5a8b941 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java @@ -285,7 +285,7 @@ public class FullTableBackupClient { args[3] = backupContext.getBackupStatus(table).getTargetDir(); LOG.debug("Copy snapshot " + args[1] + " to " + args[3]); - res = copyService.copy(backupContext, backupManager, conf, BackupCopyService.Type.FULL, args); + res = copyService.copy(backupContext, backupManager, conf, BackupType.FULL, args); // if one snapshot export failed, do not continue for remained snapshots if (res != 0) { LOG.error("Exporting Snapshot " + args[1] + " failed with return code: " + res + "."); http://git-wip-us.apache.org/repos/asf/hbase/blob/909c4efa/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java index d9610a2..0a8d14d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java @@ -121,7 +121,7 @@ public class IncrementalTableBackupClient { // We run DistCp maximum 2 times // If it fails on a second time, we throw Exception int res = - copyService.copy(backupContext, backupManager, conf, 
BackupCopyService.Type.INCREMENTAL, + copyService.copy(backupContext, backupManager, conf, BackupType.INCREMENTAL, strArr); if (res != 0) { http://git-wip-us.apache.org/repos/asf/hbase/blob/909c4efa/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupCopyService.java -- diff --git
[09/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html new file mode 100644 index 000..2695dde --- /dev/null +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncTableImpl.html @@ -0,0 +1,711 @@ +http://www.w3.org/TR/html4/loose.dtd;> + + + + + +AsyncTableImpl (Apache HBase 2.0.0-SNAPSHOT API) + + + + + +var methods = {"i0":9,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10}; +var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; +var altColor = "altColor"; +var rowColor = "rowColor"; +var tableTab = "tableTab"; +var activeTableTab = "activeTableTab"; + + +JavaScript is disabled on your browser. + + + + + +Skip navigation links + + + + +Overview +Package +Class +Use +Tree +Deprecated +Index +Help + + + + +PrevClass +NextClass + + +Frames +NoFrames + + +AllClasses + + + + + + + +Summary: +Nested| +Field| +Constr| +Method + + +Detail: +Field| +Constr| +Method + + + + + + + + +org.apache.hadoop.hbase.client +Class AsyncTableImpl + + + +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">java.lang.Object + + +org.apache.hadoop.hbase.client.AsyncTableImpl + + + + + + + +All Implemented Interfaces: +AsyncTable + + + +@InterfaceAudience.Private +class AsyncTableImpl +extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object +implements AsyncTable +The implementation of AsyncTable. 
+ + + + + + + + + + + +Nested Class Summary + +Nested Classes + +Modifier and Type +Class and Description + + +private static interface +AsyncTableImpl.ConverterD,I,S + + +private static interface +AsyncTableImpl.RpcCallRESP,REQ + + + + + + + + + +Field Summary + +Fields + +Modifier and Type +Field and Description + + +private AsyncConnectionImpl +conn + + +private long +operationTimeoutNs + + +private long +readRpcTimeoutNs + + +private TableName +tableName + + +private long +writeRpcTimeoutNs + + + + + + + + + +Constructor Summary + +Constructors + +Constructor and Description + + +AsyncTableImpl(AsyncConnectionImplconn, + TableNametableName) + + + + + + + + + +Method Summary + +All MethodsStatic MethodsInstance MethodsConcrete Methods + +Modifier and Type +Method and Description + + +private static REQ,PREQ,PRESP,RESPhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFutureRESP +call(HBaseRpcControllercontroller, +HRegionLocationloc, + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.Interfacestub, +REQreq, +AsyncTableImpl.ConverterPREQ,byte[],REQreqConvert, +AsyncTableImpl.RpcCallPRESP,PREQrpcCall, +AsyncTableImpl.ConverterRESP,HBaseRpcController,PRESPrespConverter) + + +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void +delete(Deletedelete) +Deletes the specified cells/row. 
+ + + +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true; title="class or interface in java.lang">Boolean +exists(Getget) +Test for the existence of columns in the table, as specified by the Get. + + + +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFutureResult +get(Getget) +Extracts certain cells from a given row. + + + +org.apache.hadoop.conf.Configuration +getConfiguration() +Returns the Configuration object used by this instance. + + + +TableName +getName() +Gets the fully qualified table name instance of this table. + + + +long +getOperationTimeout(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true;
hbase git commit: HBASE-16721 Concurrency issue in WAL unflushed seqId tracking - ADDENDUM 2 for making sure that MVCC read point is advanced after flush
Repository: hbase Updated Branches: refs/heads/branch-1.1 ba6e7ddd3 -> 382f88ae8 HBASE-16721 Concurrency issue in WAL unflushed seqId tracking - ADDENDUM 2 for making sure that MVCC read point is advanced after flush Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/382f88ae Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/382f88ae Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/382f88ae Branch: refs/heads/branch-1.1 Commit: 382f88ae87490cf0860aa63e104bf7e8273827c4 Parents: ba6e7dd Author: Enis SoztutarAuthored: Mon Oct 17 14:30:01 2016 -0700 Committer: Enis Soztutar Committed: Mon Oct 17 14:30:01 2016 -0700 -- .../hadoop/hbase/regionserver/HRegion.java | 20 +++- 1 file changed, 11 insertions(+), 9 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/382f88ae/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index cc89b84..e600ae1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -2198,16 +2198,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi long trxId = 0; try { try { +mvcc.waitForPreviousTransactionsComplete(); writeEntry = mvcc.beginMemstoreInsert(); -// wait for all in-progress transactions to commit to WAL before -// we can start the flush. This prevents -// uncommitted transactions from being written into HFiles. -// We have to block before we start the flush, otherwise keys that -// were removed via a rollbackMemstore could be written to Hfiles. 
-mvcc.waitForPreviousTransactionsComplete(writeEntry); -// set w to null to prevent mvcc.advanceMemstore from being called again inside finally block -writeEntry = null; - if (wal != null) { Long earliestUnflushedSequenceIdForTheRegion = wal.startCacheFlush(encodedRegionName, flushedFamilyNames); @@ -2284,6 +2276,16 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi throw ioe; } } + + // wait for all in-progress transactions to commit to WAL before + // we can start the flush. This prevents + // uncommitted transactions from being written into HFiles. + // We have to block before we start the flush, otherwise keys that + // were removed via a rollbackMemstore could be written to Hfiles. + writeEntry.setWriteNumber(flushOpSeqId); + mvcc.waitForPreviousTransactionsComplete(writeEntry); + // set w to null to prevent mvcc.advanceMemstore from being called again inside finally block + writeEntry = null; } finally { if (writeEntry != null) { // in case of failure just mark current writeEntry as complete
hbase git commit: HBASE-16712 fix mvn hadoop-3.0 profile for mvn install
Repository: hbase Updated Branches: refs/heads/master 73e945670 -> 8e100628a HBASE-16712 fix mvn hadoop-3.0 profile for mvn install - Handles licenses for transitive dependencies introduced by Hadoop 3 - Forces dependency on hadoop 3.0.0-alpha1 instead of 3.0.0-SNAPSHOT Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8e100628 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8e100628 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8e100628 Branch: refs/heads/master Commit: 8e100628a45028365486e47858dad786cebb49b9 Parents: 73e9456 Author: Jonathan M HsiehAuthored: Mon Sep 26 12:15:35 2016 -0700 Committer: Jonathan M Hsieh Committed: Mon Oct 17 14:58:52 2016 -0700 -- .../src/main/resources/META-INF/LICENSE.vm | 4 +- .../src/main/resources/META-INF/NOTICE.vm | 23 ++ .../src/main/resources/supplemental-models.xml | 224 ++- pom.xml | 2 +- 4 files changed, 250 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/8e100628/hbase-resource-bundle/src/main/resources/META-INF/LICENSE.vm -- diff --git a/hbase-resource-bundle/src/main/resources/META-INF/LICENSE.vm b/hbase-resource-bundle/src/main/resources/META-INF/LICENSE.vm index 31e508f..688310f 100644 --- a/hbase-resource-bundle/src/main/resources/META-INF/LICENSE.vm +++ b/hbase-resource-bundle/src/main/resources/META-INF/LICENSE.vm @@ -1561,7 +1561,9 @@ You can redistribute it and/or modify it under either the terms of the GPL ## Whitelist of licenses that it's safe to not aggregate as above. ## Note that this doesn't include ALv2 or the aforementioned aggregate ## license mentions. 
-#set($non_aggregate_fine = [ 'Public Domain', 'New BSD license', 'BSD license', 'Mozilla Public License Version 2.0' ]) +## +## See this FAQ link for justifications: https://www.apache.org/legal/resolved.html +#set($non_aggregate_fine = [ 'Public Domain', 'New BSD license', 'BSD license', 'Mozilla Public License Version 2.0', 'Creative Commons Attribution License, Version 2.5', 'MPL 1.1']) ## include LICENSE sections for anything not under ASL2.0 #foreach( ${dep} in ${projects} ) ## if there are no licenses we'll fail the build later, so http://git-wip-us.apache.org/repos/asf/hbase/blob/8e100628/hbase-resource-bundle/src/main/resources/META-INF/NOTICE.vm -- diff --git a/hbase-resource-bundle/src/main/resources/META-INF/NOTICE.vm b/hbase-resource-bundle/src/main/resources/META-INF/NOTICE.vm index 368e847..53fb51e 100644 --- a/hbase-resource-bundle/src/main/resources/META-INF/NOTICE.vm +++ b/hbase-resource-bundle/src/main/resources/META-INF/NOTICE.vm @@ -272,6 +272,29 @@ JRuby's licence. #if(${dep.artifactId.equals("jruby-complete")}) #set($jruby=true) #end +#if( ${dep.licenses.isEmpty()} ) +ERROR: This product includes ${dep.name} which has no licenses! +Revert the change if invalid or if intentional add license info to supplemental-models.xml + +${dep.groupId} +${dep.artifactId} +${dep.version} + +maven central search +g:${dep.groupId} AND a:${dep.artifactId} AND v:${dep.version} + +project website +${dep.url} +project source +${dep.scm.url} + + + +## fail the template. If you're looking at the source NOTICE.vm +## file based on a stacktrace or exception message, you need to find +## the generated NOTICE file that has the actual dependency info printed. 
+${dep.license[0]} +#end #if( !(${dep.licenses[0].name.contains("BSD")} || ${dep.licenses[0].name.contains("MIT")}) ) #if( !(${dep.licenses[0].name.equals("Public Domain")}) ) #if( !(${dep.licenses[0].name.contains("Apache Software License, Version 2.0")}) ) http://git-wip-us.apache.org/repos/asf/hbase/blob/8e100628/hbase-resource-bundle/src/main/resources/supplemental-models.xml -- diff --git a/hbase-resource-bundle/src/main/resources/supplemental-models.xml b/hbase-resource-bundle/src/main/resources/supplemental-models.xml index 0979b5f..982a087 100644 --- a/hbase-resource-bundle/src/main/resources/supplemental-models.xml +++ b/hbase-resource-bundle/src/main/resources/supplemental-models.xml @@ -644,6 +644,21 @@ under the License. + + org.apache.commons + commons-csv + 1.0 + + + Apache License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + + + org.apache.commons commons-math @@ -714,6 +729,21 @@ under the License. + + org.apache.curator + curator-test +
hbase git commit: HBASE-16712 ADDENDUM fix whitespace
Repository: hbase Updated Branches: refs/heads/master 8e100628a -> e1a6c9403 HBASE-16712 ADDENDUM fix whitespace Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e1a6c940 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e1a6c940 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e1a6c940 Branch: refs/heads/master Commit: e1a6c94033bf4d986e72cd6109aac003b47e43d9 Parents: 8e10062 Author: Jonathan M HsiehAuthored: Mon Oct 17 15:03:13 2016 -0700 Committer: Jonathan M Hsieh Committed: Mon Oct 17 15:07:50 2016 -0700 -- hbase-resource-bundle/src/main/resources/supplemental-models.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/e1a6c940/hbase-resource-bundle/src/main/resources/supplemental-models.xml -- diff --git a/hbase-resource-bundle/src/main/resources/supplemental-models.xml b/hbase-resource-bundle/src/main/resources/supplemental-models.xml index 982a087..e1de3c7 100644 --- a/hbase-resource-bundle/src/main/resources/supplemental-models.xml +++ b/hbase-resource-bundle/src/main/resources/supplemental-models.xml @@ -1098,7 +1098,7 @@ under the License. - + io.dropwizard.metrics metrics-core @@ -1211,7 +1211,7 @@ Copyright 2006 Envoi Solutions LLC - +
[21/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html index 37c4991..35afe20 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html @@ -138,38 +138,42 @@ -org.apache.hadoop.hbase.procedure2.store.wal +org.apache.hadoop.hbase.procedure2 -org.apache.hadoop.hbase.quotas +org.apache.hadoop.hbase.procedure2.store.wal -org.apache.hadoop.hbase.regionserver +org.apache.hadoop.hbase.quotas -org.apache.hadoop.hbase.regionserver.wal +org.apache.hadoop.hbase.regionserver -org.apache.hadoop.hbase.rsgroup +org.apache.hadoop.hbase.regionserver.wal -org.apache.hadoop.hbase.security +org.apache.hadoop.hbase.rsgroup -org.apache.hadoop.hbase.security.visibility +org.apache.hadoop.hbase.security -org.apache.hadoop.hbase.snapshot +org.apache.hadoop.hbase.security.visibility +org.apache.hadoop.hbase.snapshot + + + org.apache.hadoop.hbase.util @@ -699,6 +703,24 @@ + + + +Uses of HBaseIOException in org.apache.hadoop.hbase.procedure2 + +Subclasses of HBaseIOException in org.apache.hadoop.hbase.procedure2 + +Modifier and Type +Class and Description + + + +class +BadProcedureException + + + + http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html index dc52d45..44e2d29 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html @@ -129,47 +129,43 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
-org.apache.hadoop.hbase.mob.mapreduce - - - org.apache.hadoop.hbase.regionserver - + org.apache.hadoop.hbase.replication Multi Cluster Replication - + org.apache.hadoop.hbase.rsgroup - + org.apache.hadoop.hbase.security - + org.apache.hadoop.hbase.security.access - + org.apache.hadoop.hbase.security.visibility - + org.apache.hadoop.hbase.snapshot - + org.apache.hadoop.hbase.thrift Provides an HBase http://incubator.apache.org/thrift/;>Thrift service. - + org.apache.hadoop.hbase.tool @@ -1445,68 +1441,6 @@ service. - - - -Uses of HColumnDescriptor in org.apache.hadoop.hbase.mob.mapreduce - -Fields in org.apache.hadoop.hbase.mob.mapreduce declared as HColumnDescriptor - -Modifier and Type -Field and Description - - - -private HColumnDescriptor -SweepReducer.family - - -private HColumnDescriptor -MemStoreWrapper.hcd - - - - -Methods in org.apache.hadoop.hbase.mob.mapreduce with parameters of type HColumnDescriptor - -Modifier and Type -Method and Description - - - -private void -SweepJob.removeUnusedFiles(org.apache.hadoop.mapreduce.Jobjob, - TableNametn, - HColumnDescriptorhcd) -Archives unused mob files. - - - -int -SweepJob.sweep(TableNametn, - HColumnDescriptorfamily) -Runs MapReduce to do the sweeping on the mob files. 
- - - - - -Constructors in org.apache.hadoop.hbase.mob.mapreduce with parameters of type HColumnDescriptor - -Constructor and Description - - - -MemStoreWrapper(org.apache.hadoop.mapreduce.Reducer.Contextcontext, - org.apache.hadoop.fs.FileSystemfs, - BufferedMutatortable, - HColumnDescriptorhcd, - MemStorememstore, - CacheConfigcacheConfig) - - - - http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html index 9c5db45..7504456 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html @@ -437,6 +437,30 @@ service. +default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation +AsyncTableRegionLocator.getRegionLocation(byte[]row) +Finds the region on which the given row is being served. + + + +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in
[25/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/KeyValue.RawBytesComparator.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.RawBytesComparator.html b/devapidocs/org/apache/hadoop/hbase/KeyValue.RawBytesComparator.html index f600bb9..9dd3cf6 100644 --- a/devapidocs/org/apache/hadoop/hbase/KeyValue.RawBytesComparator.html +++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.RawBytesComparator.html @@ -126,7 +126,7 @@ var activeTableTab = "activeTableTab"; http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -public static class KeyValue.RawBytesComparator +public static class KeyValue.RawBytesComparator extends KeyValue.KVComparator @@ -250,7 +250,7 @@ extends RawBytesComparator -publicRawBytesComparator() +publicRawBytesComparator() Deprecated. @@ -268,7 +268,7 @@ extends getLegacyKeyComparatorName -publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetLegacyKeyComparatorName() +publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetLegacyKeyComparatorName() Deprecated. The HFileV2 file format's trailer contains this class name. We reinterpret this and instantiate the appropriate comparator. @@ -288,7 +288,7 @@ extends compareFlatKey http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -publicintcompareFlatKey(byte[]left, +publicintcompareFlatKey(byte[]left, intloffset, intllength, byte[]right, @@ -312,7 +312,7 @@ publicint compare -publicintcompare(Cellleft, +publicintcompare(Cellleft, Cellright) Deprecated. 
Description copied from class:KeyValue.KVComparator @@ -332,7 +332,7 @@ publicint compareOnlyKeyPortion -publicintcompareOnlyKeyPortion(Cellleft, +publicintcompareOnlyKeyPortion(Cellleft, Cellright) Deprecated. @@ -347,7 +347,7 @@ publicint calcIndexKey -publicbyte[]calcIndexKey(byte[]lastKeyOfPreviousBlock, +publicbyte[]calcIndexKey(byte[]lastKeyOfPreviousBlock, byte[]firstKeyInBlock) Deprecated. http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html b/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html index ddee06b..dfd9d4e 100644 --- a/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html +++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.RowOnlyComparator.html @@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab"; -public static class KeyValue.RowOnlyComparator +public static class KeyValue.RowOnlyComparator extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true; title="class or interface in java.util">ComparatorKeyValue Comparator that compares row component only of a KeyValue. 
@@ -215,7 +215,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparato comparator -finalKeyValue.KVComparator comparator +finalKeyValue.KVComparator comparator @@ -232,7 +232,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparato RowOnlyComparator -publicRowOnlyComparator(KeyValue.KVComparatorc) +publicRowOnlyComparator(KeyValue.KVComparatorc) @@ -249,7 +249,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Comparato compare -publicintcompare(KeyValueleft, +publicintcompare(KeyValueleft, KeyValueright) Specified by: http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html b/devapidocs/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html index 14fa975..8f6c085 100644 --- a/devapidocs/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html +++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html @@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab"; -public static interface KeyValue.SamePrefixComparatorT +public static interface KeyValue.SamePrefixComparatorT Avoids redundant comparisons for better
[49/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apache_hbase_reference_guide.pdf -- diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf index 41cd630..6603fac 100644 --- a/apache_hbase_reference_guide.pdf +++ b/apache_hbase_reference_guide.pdf @@ -1,30 +1,28 @@ %PDF-1.4 %���� 1 0 obj -<< /Title +<< /Title (Apache HBase Reference Guide) /Author (Apache HBase Team) -/Creator (Asciidoctor PDF 1.5.0.alpha.11, based on Prawn 1.3.0) +/Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1) /Producer (Apache HBase Team) -/CreationDate (D:20161009074353+00'00') -/ModDate (D:20161009074353+00'00') +/CreationDate (D:20161017144413+00'00') +/ModDate (D:20161017144413+00'00') >> endobj 2 0 obj << /Type /Catalog /Pages 3 0 R -/Names 24 0 R -/Outlines 4237 0 R -/PageLabels 4443 0 R +/Names 25 0 R +/Outlines 4048 0 R +/PageLabels 4255 0 R /PageMode /UseOutlines -/OpenAction [7 0 R /FitH 842.89] -/ViewerPreferences << /DisplayDocTitle true ->> +/ViewerPreferences [/FitWindow] >> endobj 3 0 obj << /Type /Pages -/Count 638 -/Kids [7 0 R 10 0 R 12 0 R 14 0 R 16 0 R 18 0 R 20 0 R 22 0 R 41 0 R 45 0 R 49 0 R 62 0 R 65 0 R 68 0 R 70 0 R 72 0 R 80 0 R 83 0 R 87 0 R 91 0 R 94 0 R 96 0 R 101 0 R 109 0 R 114 0 R 116 0 R 139 0 R 145 0 R 152 0 R 154 0 R 160 0 R 163 0 R 175 0 R 181 0 R 196 0 R 200 0 R 204 0 R 206 0 R 210 0 R 221 0 R 228 0 R 237 0 R 244 0 R 251 0 R 259 0 R 267 0 R 279 0 R 288 0 R 296 0 R 305 0 R 313 0 R 320 0 R 328 0 R 334 0 R 341 0 R 349 0 R 358 0 R 366 0 R 374 0 R 383 0 R 392 0 R 401 0 R 408 0 R 416 0 R 424 0 R 433 0 R 441 0 R 448 0 R 457 0 R 469 0 R 475 0 R 483 0 R 489 0 R 498 0 R 506 0 R 514 0 R 518 0 R 521 0 R 526 0 R 532 0 R 548 0 R 558 0 R 563 0 R 574 0 R 583 0 R 588 0 R 590 0 R 592 0 R 603 0 R 609 0 R 614 0 R 621 0 R 633 0 R 645 0 R 662 0 R 676 0 R 686 0 R 688 0 R 690 0 R 700 0 R 712 0 R 722 0 R 732 0 R 736 0 R 739 0 R 743 0 R 747 0 R 750 0 R 753 0 R 755 0 R 758 0 R 762 0 R 764 0 R 769 0 R 772 0 R 780 0 R 783 0 R 785 0 
R 791 0 R 793 0 R 797 0 R 806 0 R 808 0 R 811 0 R 814 0 R 817 0 R 820 0 R 834 0 R 841 0 R 850 0 R 861 0 R 867 0 R 879 0 R 883 0 R 886 0 R 890 0 R 893 0 R 904 0 R 912 0 R 918 0 R 923 0 R 927 0 R 929 0 R 944 0 R 955 0 R 961 0 R 967 0 R 970 0 R 978 0 R 986 0 R 990 0 R 995 0 R 1000 0 R 1002 0 R 1004 0 R 1006 0 R 1017 0 R 1025 0 R 1029 0 R 1036 0 R 1044 0 R 1051 0 R 1055 0 R 1061 0 R 1066 0 R 1073 0 R 1079 0 R 1082 0 R 1085 0 R 1096 0 R 1100 0 R 1102 0 R 1105 0 R 1109 0 R 1114 0 R 1117 0 R 1129 0 R 1133 0 R 1139 0 R 1146 0 R 1152 0 R 1156 0 R 1160 0 R 1162 0 R 1165 0 R 1168 0 R 1171 0 R 1174 0 R 1178 0 R 1182 0 R 1187 0 R 1191 0 R 1194 0 R 1196 0 R 1206 0 R 1210 0 R 1218 0 R 1227 0 R 1233 0 R 1237 0 R 1239 0 R 1250 0 R 1253 0 R 1259 0 R 1267 0 R 1270 0 R 1277 0 R 1288 0 R 1290 0 R 1292 0 R 1301 0 R 1303 0 R 1305 0 R 1308 0 R 1310 0 R 1312 0 R 1314 0 R 1316 0 R 1319 0 R 1323 0 R 1328 0 R 1330 0 R 1332 0 R 1334 0 R 1339 0 R 1346 0 R 1351 0 R 1354 0 R 1356 0 R 1359 0 R 1363 0 R 1365 0 R 1368 0 R 1370 0 R 1373 0 R 1378 0 R 1383 0 R 1391 0 R 1396 0 R 1410 0 R 14 21 0 R 1425 0 R 1438 0 R 1447 0 R 1461 0 R 1467 0 R 1475 0 R 1490 0 R 1504 0 R 1515 0 R 1521 0 R 1528 0 R 1537 0 R 1543 0 R 1548 0 R 1557 0 R 1559 0 R 1568 0 R 1574 0 R 1577 0 R 1590 0 R 1592 0 R 1598 0 R 1604 0 R 1608 0 R 1616 0 R 1624 0 R 1628 0 R 1630 0 R 1632 0 R 1645 0 R 1652 0 R 1661 0 R 1666 0 R 1680 0 R 1690 0 R 1694 0 R 1707 0 R 1717 0 R 1722 0 R 1726 0 R 1731 0 R 1734 0 R 1741 0 R 1745 0 R 1753 0 R 1757 0 R 1762 0 R 1770 0 R 1775 0 R 1781 0 R 1790 0 R 1797 0 R 1803 0 R 1808 0 R 1812 0 R 1815 0 R 1820 0 R 1826 0 R 1832 0 R 1834 0 R 1836 0 R 1839 0 R 1847 0 R 1850 0 R 1857 0 R 1866 0 R 1869 0 R 1874 0 R 1878 0 R 1880 0 R 1885 0 R 1888 0 R 1890 0 R 1895 0 R 1905 0 R 1907 0 R 1909 0 R 1911 0 R 1914 0 R 1916 0 R 1918 0 R 1921 0 R 1923 0 R 1925 0 R 1931 0 R 1935 0 R 1944 0 R 1946 0 R 1948 0 R 1955 0 R 1957 0 R 1962 0 R 1964 0 R 1971 0 R 1976 0 R 1980 0 R 1984 0 R 1988 0 R 1990 0 R 1994 0 R 1997 0 R 1999 0 R 2001 
0 R 2005 0 R 2007 0 R 2010 0 R 2012 0 R 2014 0 R 2016 0 R 2023 0 R 2026 0 R 2031 0 R 2033 0 R 2035 0 R 2037 0 R 2039 0 R 2047 0 R 2058 0 R 2073 0 R 2088 0 R 2094 0 R 2099 0 R 2103 0 R 2106 0 R 2111 0 R 2118 0 R 2120 0 R 2123 0 R 2125 0 R 2127 0 R 2129 0 R 2133 0 R 2135 0 R 2143 0 R 2150 0 R 2157 0 R 2169 0 R 2185 0 R 2197 0 R 2216 0 R 2218 0 R 2220 0 R 2224 0 R 2242 0 R 2248 0 R 2256 0 R 2264 0 R 2268 0 R 2277 0 R 2288 0 R 2294 0 R 2304 0 R 2317 0 R 2335 0 R 2343 0 R 2345 0 R 2354 0 R 2371 0 R 2378 0 R 2381 0 R 2386 0 R 2391 0 R 2400 0 R 2409 0 R 2412 0 R 2414 0 R 2418 0 R 2430 0 R 2439 0 R 2444 0 R 2448 0 R 2451 0 R 2453 0 R 2455 0 R 2457 0 R 2463 0 R 2476 0 R 2486 0 R 2495 0 R 2504 0 R 2510 0 R 2521 0 R 2528 0 R 2534 0 R 2536 0 R 2547 0 R 2555 0 R 2564 0 R 2569 0 R 2581 0 R 2585 0 R 2595 0 R 2603 0 R 2611 0 R 2617 0 R 2621 0 R 2625 0 R 2628
[14/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.html index d91f532..3dfcedd 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.html +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":9,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":9,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10}; +var methods = {"i0":9,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":9,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10}; var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab"; -PrevClass +PrevClass NextClass @@ -315,29 +315,37 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? numTries +protected int +operationTimeout + + protected long pause - + protected http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorService pool - + static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String PRIMARY_CALL_TIMEOUT_KEY - + protected long primaryCallTimeoutMicroseconds - + protected RpcRetryingCallerFactory rpcCallerFactory - + protected RpcControllerFactory rpcFactory + +protected int +rpcTimeout + protected int serverTrackerTimeout @@ -376,10 +384,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? 
protected int thresholdToLogUndoneTaskDetails - -protected int -timeout - @@ -395,13 +399,14 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? Constructor and Description -AsyncProcess(ClusterConnectionhc, +AsyncProcess(ClusterConnectionhc, org.apache.hadoop.conf.Configurationconf, http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorServicepool, RpcRetryingCallerFactoryrpcCaller, booleanuseGlobalErrors, RpcControllerFactoryrpcFactory, -intrpcTimeout) +intrpcTimeout, +intoperationTimeout) @@ -438,11 +443,12 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object[]results, booleanneedResults, CancellableRegionServerCallablecallable, -intcurTimeout) +intrpcTimeout) protected RpcRetryingCallerAbstractResponse -createCaller(CancellableRegionServerCallablecallable) +createCaller(CancellableRegionServerCallablecallable, +intrpcTimeout) Create a caller. @@ -495,6 +501,14 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? ActionRowaction) +void +setOperationTimeout(intoperationTimeout) + + +void +setRpcTimeout(intrpcTimeout) + + CResultAsyncRequestFuture submit(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorServicepool, TableNametableName, @@ -505,7 +519,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? See submit(ExecutorService, TableName, RowAccess, boolean, Batch.Callback, boolean). 
- + CResultAsyncRequestFuture submit(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorServicepool, TableNametableName, @@ -516,7 +530,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? Extract from the rows list what we can submit. - + CResultAsyncRequestFuture submit(TableNametableName, RowAccess? extends Rowrows, @@ -526,7 +540,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? See #submit(ExecutorService, TableName, RowAccess, boolean, Batch.Callback, boolean). - + CResultAsyncRequestFuture submitAll(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in
[34/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html index 07f6210..6d25806 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html @@ -370,881 +370,904 @@ 362 * @param regionLocator region locator 363 * @param silence true to ignore unmatched column families 364 * @param copyFile always copy hfiles if true -365 * @throws TableNotFoundException if table does not yet exist -366 */ -367 public void doBulkLoad(Mapbyte[], ListPath map, final Admin admin, Table table, -368 RegionLocator regionLocator, boolean silence, boolean copyFile) -369 throws TableNotFoundException, IOException { -370if (!admin.isTableAvailable(regionLocator.getName())) { -371 throw new TableNotFoundException("Table " + table.getName() + " is not currently available."); -372} -373// LQI queue does not need to be threadsafe -- all operations on this queue -374// happen in this thread -375DequeLoadQueueItem queue = new LinkedList(); -376ExecutorService pool = null; -377SecureBulkLoadClient secureClient = null; -378try { -379 prepareHFileQueue(map, table, queue, silence); -380 if (queue.isEmpty()) { -381LOG.warn("Bulk load operation did not get any files to load"); -382return; -383 } -384 pool = createExecutorService(); -385 secureClient = new SecureBulkLoadClient(table.getConfiguration(), table); -386 for (Map.Entrybyte[], ListPath entry : map.entrySet()) { -387for (Path p : entry.getValue()) { -388 fs = p.getFileSystem(table.getConfiguration()); -389 break; -390} -391 } -392 performBulkLoad(admin, table, regionLocator, queue, pool, secureClient, copyFile); -393} finally { -394 cleanup(admin, queue, pool, secureClient); -395} 
-396 } -397 -398 /** -399 * Perform a bulk load of the given directory into the given -400 * pre-existing table. This method is not threadsafe. -401 * -402 * @param hfofDir the directory that was provided as the output path -403 * of a job using HFileOutputFormat -404 * @param admin the Admin -405 * @param table the table to load into -406 * @param regionLocator region locator -407 * @param silence true to ignore unmatched column families -408 * @param copyFile always copy hfiles if true -409 * @throws TableNotFoundException if table does not yet exist -410 */ -411 public void doBulkLoad(Path hfofDir, final Admin admin, Table table, -412 RegionLocator regionLocator, boolean silence, boolean copyFile) -413 throws TableNotFoundException, IOException { -414if (!admin.isTableAvailable(regionLocator.getName())) { -415 throw new TableNotFoundException("Table " + table.getName() + " is not currently available."); -416} -417 -418/* -419 * Checking hfile format is a time-consuming operation, we should have an option to skip -420 * this step when bulkloading millions of HFiles. See HBASE-13985. -421 */ -422boolean validateHFile = getConf().getBoolean("hbase.loadincremental.validate.hfile", true); -423if (!validateHFile) { -424 LOG.warn("You are skipping HFiles validation, it might cause some data loss if files " + -425 "are not correct. If you fail to read data from your table after using this " + -426 "option, consider removing the files and bulkload again without this option. " + -427 "See HBASE-13985"); -428} -429// LQI queue does not need to be threadsafe -- all operations on this queue -430// happen in this thread -431DequeLoadQueueItem queue = new LinkedList(); -432ExecutorService pool = null; -433SecureBulkLoadClient secureClient = null; -434try { -435 prepareHFileQueue(hfofDir, table, queue, validateHFile, silence); -436 -437 if (queue.isEmpty()) { -438LOG.warn("Bulk load operation did not find any files to load in " + -439"directory " + hfofDir != null ? 
hfofDir.toUri() : "" + ". Does it contain files in " + -440"subdirectories that correspond to column family names?"); -441return; -442 } -443 pool = createExecutorService(); -444 secureClient = new SecureBulkLoadClient(table.getConfiguration(), table); -445 performBulkLoad(admin, table, regionLocator, queue, pool, secureClient, copyFile); -446} finally { -447 cleanup(admin, queue, pool, secureClient); -448} -449 } -450 -451 void performBulkLoad(final Admin admin, Table table,
[30/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/checkstyle-aggregate.html -- diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html index ee02ddd..ec2440d 100644 --- a/checkstyle-aggregate.html +++ b/checkstyle-aggregate.html @@ -7,7 +7,7 @@ - + Apache HBase Checkstyle Results @@ -280,10 +280,10 @@ Warnings Errors -2014 +2023 0 0 -13783 +13747 Files @@ -626,7 +626,7 @@ org/apache/hadoop/hbase/client/AsyncProcess.java 0 0 -10 +9 org/apache/hadoop/hbase/client/AsyncRequestFuture.java 0 @@ -636,82 +636,92 @@ org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java 0 0 -24 +22 -org/apache/hadoop/hbase/client/BatchErrors.java +org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.java 0 0 1 -org/apache/hadoop/hbase/client/BufferedMutator.java +org/apache/hadoop/hbase/client/BatchErrors.java 0 0 1 -org/apache/hadoop/hbase/client/BufferedMutatorImpl.java +org/apache/hadoop/hbase/client/BufferedMutator.java 0 0 -2 +1 +org/apache/hadoop/hbase/client/BufferedMutatorImpl.java +0 +0 +1 + org/apache/hadoop/hbase/client/CancellableRegionServerCallable.java 0 0 3 - + org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java 0 0 2 - + org/apache/hadoop/hbase/client/ClientIdGenerator.java 0 0 1 - + org/apache/hadoop/hbase/client/ClientScanner.java 0 0 206 - + org/apache/hadoop/hbase/client/ClientServiceCallable.java 0 0 2 - + org/apache/hadoop/hbase/client/ClientSimpleScanner.java 0 0 1 - + org/apache/hadoop/hbase/client/ClientSmallReversedScanner.java 0 0 4 - + org/apache/hadoop/hbase/client/ClientSmallScanner.java 0 0 6 - + org/apache/hadoop/hbase/client/ClusterStatusListener.java 0 0 1 - + org/apache/hadoop/hbase/client/CompactType.java 0 0 2 - + org/apache/hadoop/hbase/client/ConnectionConfiguration.java 0 0 1 + +org/apache/hadoop/hbase/client/ConnectionFactory.java +0 +0 +1 org/apache/hadoop/hbase/client/ConnectionImplementation.java 0 0 -8 +3 org/apache/hadoop/hbase/client/ConnectionUtils.java 0 @@ -761,7 +771,7 @@ 
org/apache/hadoop/hbase/client/HTableMultiplexer.java 0 0 -6 +5 org/apache/hadoop/hbase/client/HTableWrapper.java 0 @@ -926,7 +936,7 @@ org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java 0 0 -3 +2 org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java 0 @@ -2436,7 +2446,7 @@ org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java 0 0 -4 +7 org/apache/hadoop/hbase/mapreduce/MultiHFileOutputFormat.java 0 @@ -2686,7 +2696,7 @@ org/apache/hadoop/hbase/master/MasterCoprocessorHost.java 0 0 -10 +12 org/apache/hadoop/hbase/master/MasterDumpServlet.java 0 @@ -3006,7 +3016,7 @@ org/apache/hadoop/hbase/master/procedure/DispatchMergingRegionsProcedure.java 0 0 -57 +58 org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java 0 @@ -3021,12 +3031,12 @@ org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java 0 0 -1 +2 org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java 0 0 -21 +17 org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java 0 @@ -3148,26 +3158,6 @@ 0 1 -org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java -0 -0 -5 - -org/apache/hadoop/hbase/mob/mapreduce/SweepJob.java -0 -0 -8 - -org/apache/hadoop/hbase/mob/mapreduce/SweepReducer.java -0 -0 -9 - -org/apache/hadoop/hbase/mob/mapreduce/Sweeper.java -0 -0 -6 - org/apache/hadoop/hbase/monitoring/LogMonitoring.java 0 0 @@ -3298,20 +3288,30 @@ 0 10 +org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java +0 +0 +4 + +org/apache/hadoop/hbase/procedure2/BadProcedureException.java +0 +0 +1 + org/apache/hadoop/hbase/procedure2/Procedure.java 0 0 -3 +2 org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java 0 0 -11 +10 org/apache/hadoop/hbase/procedure2/ProcedureUtil.java 0 0 -2 +3 org/apache/hadoop/hbase/procedure2/RemoteProcedureException.java 0 @@ -3353,25 +3353,15 @@ 0 9 -org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java -0 -0 -1 - org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java 0 0 -8 - 
+9 + org/apache/hadoop/hbase/procedure2/util/StringUtils.java 0 0 4 - -org/apache/hadoop/hbase/procedure2/util/TimeoutBlockingQueue.java -0 -0 -5 org/apache/hadoop/hbase/protobuf/ProtobufMagic.java 0 @@ -3416,3263 +3406,3268 @@ org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java 0 0 -1 +2 org/apache/hadoop/hbase/quotas/QuotaCache.java 0 0 2 -org/apache/hadoop/hbase/quotas/QuotaLimiterFactory.java +org/apache/hadoop/hbase/quotas/QuotaLimiter.java 0 0 1 +org/apache/hadoop/hbase/quotas/QuotaLimiterFactory.java +0 +0 +1 + org/apache/hadoop/hbase/quotas/QuotaRetriever.java 0 0 2 - + org/apache/hadoop/hbase/quotas/QuotaSettings.java 0 0 6 - +
[43/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/filter/FilterList.html -- diff --git a/apidocs/org/apache/hadoop/hbase/filter/FilterList.html b/apidocs/org/apache/hadoop/hbase/filter/FilterList.html index 8bf8c8b..2a50d5b 100644 --- a/apidocs/org/apache/hadoop/hbase/filter/FilterList.html +++ b/apidocs/org/apache/hadoop/hbase/filter/FilterList.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10}; +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":9,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10}; var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -103,12 +103,17 @@ var activeTableTab = "activeTableTab"; org.apache.hadoop.hbase.filter.Filter +org.apache.hadoop.hbase.filter.FilterBase + + org.apache.hadoop.hbase.filter.FilterList + + @@ -117,7 +122,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public @InterfaceStability.Stable public final class FilterList -extends Filter +extends org.apache.hadoop.hbase.filter.FilterBase Implementation of Filter that represents an ordered List of Filters which will be evaluated with a specified boolean operator FilterList.Operator.MUST_PASS_ALL (AND) or FilterList.Operator.MUST_PASS_ONE (OR). @@ -242,117 +247,134 @@ extends +void +addFilter(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListFilterfilters) + + boolean filterAllRemaining() -If this returns true, the scan will terminate. +Filters that never filter all remaining can inherit this implementation that + never stops the filter early. 
- + Filter.ReturnCode filterKeyValue(Cellc) A way to filter based on the column family, column qualifier and/or the column value. - + boolean filterRow() -Last chance to veto row based on previous Filter.filterKeyValue(Cell) calls. +Filters that never filter by rows based on previously gathered state from + Filter.filterKeyValue(Cell) can inherit this implementation that + never filters a row. - + void filterRowCells(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListCellcells) Filters that never filter by modifying the returned List of Cells can inherit this implementation that does nothing. - + boolean filterRowKey(byte[]rowKey, intoffset, intlength) -Filters a row based on the row key. +Filters that do not filter by row key can inherit this implementation that + never filters anything. - + boolean filterRowKey(CellfirstRowCell) Filters a row based on the row key. - + http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListFilter getFilters() Get the filters. - + Cell getNextCellHint(CellcurrentCell) -If the filter returns the match code SEEK_NEXT_USING_HINT, then it should also tell which is - the next key it must seek to. +Filters that are not sure which key must be next seeked to, can inherit + this implementation that, by default, returns a null Cell. - + FilterList.Operator getOperator() Get the operator. - + boolean hasFilterRow() -Primarily used to check for conflicts with scans(such as scans that do not read a full row at a - time). +Fitlers that never filter by modifying the returned List of Cells can + inherit this implementation that does nothing. - + boolean isFamilyEssential(byte[]name) -Check that given column family is essential for filter to check row. +By default, we require all scan's column families to be present. 
- + static FilterList parseFrom(byte[]pbBytes) - + void reset() -Reset the state of the filter between rows. +Filters that are purely stateless and do nothing in their reset() methods can inherit + this null/empty implementation. - + void setReversed(booleanreversed) alter the reversed scan flag - + byte[] toByteArray() -TODO: JAVADOC - - Concrete implementers can signal a failure condition in their code by throwing an - http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException. +Return length 0 byte array for Filters that don't require special serialization - + http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String -toString() +toString() +Return filter's info for debugging and logging purpose. +
[40/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/client/ConnectionFactory.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/ConnectionFactory.html b/apidocs/src-html/org/apache/hadoop/hbase/client/ConnectionFactory.html index 01681a9..16beebf 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/ConnectionFactory.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/ConnectionFactory.html @@ -36,212 +36,265 @@ 028import org.apache.hadoop.hbase.classification.InterfaceStability; 029import org.apache.hadoop.hbase.security.User; 030import org.apache.hadoop.hbase.security.UserProvider; -031 +031import org.apache.hadoop.hbase.util.ReflectionUtils; 032 -033/** -034 * A non-instantiable class that manages creation of {@link Connection}s. -035 * Managing the lifecycle of the {@link Connection}s to the cluster is the responsibility of -036 * the caller. -037 * From a {@link Connection}, {@link Table} implementations are retrieved -038 * with {@link Connection#getTable(TableName)}. Example: -039 * pre -040 * Connection connection = ConnectionFactory.createConnection(config); -041 * Table table = connection.getTable(TableName.valueOf("table1")); -042 * try { -043 * // Use the table as needed, for a single operation and a single thread -044 * } finally { -045 * table.close(); -046 * connection.close(); -047 * } -048 * /pre -049 * -050 * Similarly, {@link Connection} also returns {@link Admin} and {@link RegionLocator} -051 * implementations. -052 * -053 * @see Connection -054 * @since 0.99.0 -055 */ -056@InterfaceAudience.Public -057@InterfaceStability.Evolving -058public class ConnectionFactory { -059 -060 /** No public c.tors */ -061 protected ConnectionFactory() { -062 } +033 +034/** +035 * A non-instantiable class that manages creation of {@link Connection}s. +036 * Managing the lifecycle of the {@link Connection}s to the cluster is the responsibility of +037 * the caller. 
+038 * From a {@link Connection}, {@link Table} implementations are retrieved +039 * with {@link Connection#getTable(TableName)}. Example: +040 * pre +041 * Connection connection = ConnectionFactory.createConnection(config); +042 * Table table = connection.getTable(TableName.valueOf("table1")); +043 * try { +044 * // Use the table as needed, for a single operation and a single thread +045 * } finally { +046 * table.close(); +047 * connection.close(); +048 * } +049 * /pre +050 * +051 * Similarly, {@link Connection} also returns {@link Admin} and {@link RegionLocator} +052 * implementations. +053 * +054 * @see Connection +055 * @since 0.99.0 +056 */ +057@InterfaceAudience.Public +058@InterfaceStability.Evolving +059public class ConnectionFactory { +060 +061 public static final String HBASE_CLIENT_ASYNC_CONNECTION_IMPL = +062 "hbase.client.async.connection.impl"; 063 -064 /** -065 * Create a new Connection instance using default HBaseConfiguration. Connection -066 * encapsulates all housekeeping for a connection to the cluster. All tables and interfaces -067 * created from returned connection share zookeeper connection, meta cache, and connections -068 * to region servers and masters. -069 * br -070 * The caller is responsible for calling {@link Connection#close()} on the returned -071 * connection instance. -072 * -073 * Typical usage: -074 * pre -075 * Connection connection = ConnectionFactory.createConnection(); -076 * Table table = connection.getTable(TableName.valueOf("mytable")); -077 * try { -078 * table.get(...); -079 * ... -080 * } finally { -081 * table.close(); -082 * connection.close(); -083 * } -084 * /pre -085 * -086 * @return Connection object for codeconf/code -087 */ -088 public static Connection createConnection() throws IOException { -089return createConnection(HBaseConfiguration.create(), null, null); -090 } -091 -092 /** -093 * Create a new Connection instance using the passed codeconf/code instance. 
Connection -094 * encapsulates all housekeeping for a connection to the cluster. All tables and interfaces -095 * created from returned connection share zookeeper connection, meta cache, and connections -096 * to region servers and masters. -097 * br -098 * The caller is responsible for calling {@link Connection#close()} on the returned -099 * connection instance. -100 * -101 * Typical usage: -102 * pre -103 * Connection connection = ConnectionFactory.createConnection(conf); -104 * Table table = connection.getTable(TableName.valueOf("mytable")); -105 * try { -106 * table.get(...); -107 * ... -108 * } finally { -109 * table.close(); -110 * connection.close(); -111 * } -112 * /pre -113 * -114 * @param conf configuration -115 * @return Connection object for codeconf/code -116 */ -117 public static
[37/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html b/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html index 417e13a..13eb321 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html @@ -52,7 +52,7 @@ 044 protected int targetReplicaId = -1; 045 protected Consistency consistency = Consistency.STRONG; 046 protected Mapbyte[], TimeRange colFamTimeRangeMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR); -047 +047 protected Boolean loadColumnFamiliesOnDemand = null; 048 /** 049 * @return Filter 050 */ @@ -187,32 +187,67 @@ 179 IsolationLevel.fromBytes(attr); 180 } 181 -182 -183 /** -184 * Get versions of columns only within the specified timestamp range, -185 * [minStamp, maxStamp) on a per CF bases. Note, default maximum versions to return is 1. If -186 * your time range spans more than one version and you want all versions -187 * returned, up the number of versions beyond the default. -188 * Column Family time ranges take precedence over the global time range. -189 * -190 * @param cf the column family for which you want to restrict -191 * @param minStamp minimum timestamp value, inclusive -192 * @param maxStamp maximum timestamp value, exclusive -193 * @return this -194 */ -195 -196 public Query setColumnFamilyTimeRange(byte[] cf, long minStamp, long maxStamp) { -197colFamTimeRangeMap.put(cf, new TimeRange(minStamp, maxStamp)); -198return this; -199 } -200 -201 /** -202 * @return A map of column families to time ranges -203 */ -204 public Mapbyte[], TimeRange getColumnFamilyTimeRange() { -205return this.colFamTimeRangeMap; -206 } -207} +182 /** +183 * Set the value indicating whether loading CFs on demand should be allowed (cluster +184 * default is false). On-demand CF loading doesn't load column families until necessary, e.g. 
+185 * if you filter on one column, the other column family data will be loaded only for the rows +186 * that are included in result, not all rows like in normal case. +187 * With column-specific filters, like SingleColumnValueFilter w/filterIfMissing == true, +188 * this can deliver huge perf gains when there's a cf with lots of data; however, it can +189 * also lead to some inconsistent results, as follows: +190 * - if someone does a concurrent update to both column families in question you may get a row +191 * that never existed, e.g. for { rowKey = 5, { cat_videos =gt; 1 }, { video =gt; "my cat" } } +192 * someone puts rowKey 5 with { cat_videos =gt; 0 }, { video =gt; "my dog" }, concurrent scan +193 * filtering on "cat_videos == 1" can get { rowKey = 5, { cat_videos =gt; 1 }, +194 * { video =gt; "my dog" } }. +195 * - if there's a concurrent split and you have more than 2 column families, some rows may be +196 * missing some column families. +197 */ +198 public Query setLoadColumnFamiliesOnDemand(boolean value) { +199this.loadColumnFamiliesOnDemand = value; +200return this; +201 } +202 +203 /** +204 * Get the raw loadColumnFamiliesOnDemand setting; if it's not set, can be null. +205 */ +206 public Boolean getLoadColumnFamiliesOnDemandValue() { +207return this.loadColumnFamiliesOnDemand; +208 } +209 +210 /** +211 * Get the logical value indicating whether on-demand CF loading should be allowed. +212 */ +213 public boolean doLoadColumnFamiliesOnDemand() { +214return (this.loadColumnFamiliesOnDemand != null) +215 this.loadColumnFamiliesOnDemand; +216 } +217 +218 /** +219 * Get versions of columns only within the specified timestamp range, +220 * [minStamp, maxStamp) on a per CF bases. Note, default maximum versions to return is 1. If +221 * your time range spans more than one version and you want all versions +222 * returned, up the number of versions beyond the default. +223 * Column Family time ranges take precedence over the global time range. 
+224 * +225 * @param cf the column family for which you want to restrict +226 * @param minStamp minimum timestamp value, inclusive +227 * @param maxStamp maximum timestamp value, exclusive +228 * @return this +229 */ +230 +231 public Query setColumnFamilyTimeRange(byte[] cf, long minStamp, long maxStamp) { +232colFamTimeRangeMap.put(cf, new TimeRange(minStamp, maxStamp)); +233return this; +234 } +235 +236 /** +237 * @return A map of column families to time ranges +238 */ +239 public Mapbyte[], TimeRange getColumnFamilyTimeRange() { +240return this.colFamTimeRangeMap; +241 } +242}
[29/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/coc.html -- diff --git a/coc.html b/coc.html index cd135fe..a188015 100644 --- a/coc.html +++ b/coc.html @@ -7,7 +7,7 @@ - + Apache HBase Code of Conduct Policy @@ -331,7 +331,7 @@ For flagrant violations requiring a firm response the PMC may opt to skip early http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-10-09 + Last Published: 2016-10-17 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/cygwin.html -- diff --git a/cygwin.html b/cygwin.html index 7a9259b..16df94a 100644 --- a/cygwin.html +++ b/cygwin.html @@ -7,7 +7,7 @@ - + Apache HBase Installing Apache HBase (TM) on Windows using Cygwin @@ -673,7 +673,7 @@ Now your HBase server is running, start coding and build that next http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-10-09 + Last Published: 2016-10-17 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/dependencies.html -- diff --git a/dependencies.html b/dependencies.html index c4feba0..17060df 100644 --- a/dependencies.html +++ b/dependencies.html @@ -7,7 +7,7 @@ - + Apache HBase Project Dependencies @@ -518,7 +518,7 @@ http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-10-09 + Last Published: 2016-10-17 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/dependency-convergence.html -- diff --git a/dependency-convergence.html b/dependency-convergence.html index 88f7d2a..d132935 100644 --- a/dependency-convergence.html +++ b/dependency-convergence.html @@ -7,7 +7,7 @@ - + Apache HBase Reactor Dependency Convergence @@ -1775,7 +1775,7 @@ http://www.apache.org/;>The Apache Software Foundation. All rights reserved. 
- Last Published: 2016-10-09 + Last Published: 2016-10-17 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/dependency-info.html -- diff --git a/dependency-info.html b/dependency-info.html index dd228d3..a9ef8b6 100644 --- a/dependency-info.html +++ b/dependency-info.html @@ -7,7 +7,7 @@ - + Apache HBase Dependency Information @@ -312,7 +312,7 @@ http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-10-09 + Last Published: 2016-10-17 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/dependency-management.html -- diff --git a/dependency-management.html b/dependency-management.html index 6e000ae..ddcb074 100644 --- a/dependency-management.html +++ b/dependency-management.html @@ -7,7 +7,7 @@ - + Apache HBase Project Dependency Management @@ -828,7 +828,7 @@ http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-10-09 + Last Published: 2016-10-17 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/allclasses-frame.html -- diff --git a/devapidocs/allclasses-frame.html b/devapidocs/allclasses-frame.html index 756908b..0671430 100644 --- a/devapidocs/allclasses-frame.html +++ b/devapidocs/allclasses-frame.html @@ -29,6 +29,7 @@ AbstractMultiFileWriter.WriterFactory AbstractMultiOutputCompactor AbstractPositionedByteRange +AbstractProcedureScheduler AbstractProtobufLogWriter AbstractResponse AbstractResponse.ResponseType @@ -74,6 +75,9 @@ AssignmentManagerStatusTmpl.Intf AssignmentManagerStatusTmplImpl AssignmentVerificationReport +AsyncConnection
[19/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html index 166303b..9d9025d 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html @@ -177,85 +177,81 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. -org.apache.hadoop.hbase.mob.mapreduce - - - org.apache.hadoop.hbase.namespace - + org.apache.hadoop.hbase.procedure.flush - + org.apache.hadoop.hbase.quotas - + org.apache.hadoop.hbase.regionserver - + org.apache.hadoop.hbase.regionserver.wal - + org.apache.hadoop.hbase.replication Multi Cluster Replication - + org.apache.hadoop.hbase.replication.regionserver - + org.apache.hadoop.hbase.rest HBase REST - + org.apache.hadoop.hbase.rest.client - + org.apache.hadoop.hbase.rsgroup - + org.apache.hadoop.hbase.security.access - + org.apache.hadoop.hbase.security.visibility - + org.apache.hadoop.hbase.snapshot - + org.apache.hadoop.hbase.thrift Provides an HBase http://incubator.apache.org/thrift/;>Thrift service. - + org.apache.hadoop.hbase.tool - + org.apache.hadoop.hbase.util - + org.apache.hadoop.hbase.wal - + org.apache.hbase.archetypes.exemplars.client This package provides fully-functional exemplar Java code demonstrating @@ -263,7 +259,7 @@ service. archetype with hbase-client dependency. - + org.apache.hbase.archetypes.exemplars.shaded_client This package provides fully-functional exemplar Java code demonstrating @@ -783,8 +779,16 @@ service. private TableName +AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.tableName + + +private TableName AsyncRequestFutureImpl.tableName + +private TableName +AsyncTableImpl.tableName + protected TableName RpcRetryingCallerWithReadReplicas.tableName @@ -799,16 +803,24 @@ service. 
private TableName -TableState.tableName +AsyncTableRegionLocatorImpl.tableName private TableName -ClientScanner.tableName +TableState.tableName private TableName +ClientScanner.tableName + + +private TableName HTable.tableName + +private TableName +AsyncSingleRequestRpcRetryingCaller.tableName + @@ -851,10 +863,20 @@ service. TableName +AsyncTableRegionLocator.getName() +Gets the fully qualified table name instance of the table whose region we want to locate. + + + +TableName RegionLocator.getName() Gets the fully qualified table name instance of this table. + +TableName +AsyncTableImpl.getName() + TableName BufferedMutator.getName() @@ -867,6 +889,16 @@ service. TableName +AsyncTable.getName() +Gets the fully qualified table name instance of this table. + + + +TableName +AsyncTableRegionLocatorImpl.getName() + + +TableName HTable.getName() @@ -1215,7 +1247,7 @@ service. http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object[]results, booleanneedResults, CancellableRegionServerCallablecallable, -intcurTimeout) +intrpcTimeout) private MultiServerCallableRow @@ -1461,6 +1493,12 @@ service. ConnectionImplementation.getNumberOfCachedRegionLocations(TableNametableName) +(package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFutureHRegionLocation +AsyncRegionLocator.getRegionLocation(TableNametableName, + byte[]row, + booleanreload) + + HRegionLocation ClusterConnection.getRegionLocation(TableNametableName, byte[]row, @@ -1468,13 +1506,13 @@ service. Find region location hosting passed row - + HRegionLocation ConnectionImplementation.getRegionLocation(TableNametableName, byte[]row, booleanreload) - + (package private) static RegionLocations RpcRetryingCallerWithReadReplicas.getRegionLocations(booleanuseCache, intreplicaId, @@ -1482,7 +1520,7 @@ service. 
TableNametableName, byte[]row) - + static RegionLocations RegionAdminServiceCallable.getRegionLocations(ClusterConnectionconnection, TableNametableName, @@ -1490,6 +1528,12 @@ service. booleanuseCache, intreplicaId) + +AsyncTableRegionLocator +AsyncConnection.getRegionLocator(TableNametableName) +Retrieve a AsyncRegionLocator implementation to inspect region information on a table. + + RegionLocator
[47/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/client/AsyncTable.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/AsyncTable.html b/apidocs/org/apache/hadoop/hbase/client/AsyncTable.html new file mode 100644 index 000..a02115c --- /dev/null +++ b/apidocs/org/apache/hadoop/hbase/client/AsyncTable.html @@ -0,0 +1,454 @@ +http://www.w3.org/TR/html4/loose.dtd;> + + + + + +AsyncTable (Apache HBase 2.0.0-SNAPSHOT API) + + + + + +var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6}; +var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]}; +var altColor = "altColor"; +var rowColor = "rowColor"; +var tableTab = "tableTab"; +var activeTableTab = "activeTableTab"; + + +JavaScript is disabled on your browser. + + + + + +Skip navigation links + + + + +Overview +Package +Class +Use +Tree +Deprecated +Index +Help + + + + +PrevClass +NextClass + + +Frames +NoFrames + + +AllClasses + + + + + + + +Summary: +Nested| +Field| +Constr| +Method + + +Detail: +Field| +Constr| +Method + + + + + + + + +org.apache.hadoop.hbase.client +Interface AsyncTable + + + + + + + +@InterfaceAudience.Public + @InterfaceStability.Unstable +public interface AsyncTable +The asynchronous version of Table. Obtain an instance from a AsyncConnection. + + The implementation is NOT required to be thread safe. Do NOT access it from multiple threads + concurrently. + + Usually the implementations will not throw any exception directly, you need to get the exception + from the returned http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuture. 
+ + + + + + + + + + + +Method Summary + +All MethodsInstance MethodsAbstract Methods + +Modifier and Type +Method and Description + + +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void +delete(Deletedelete) +Deletes the specified cells/row. + + + +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true; title="class or interface in java.lang">Boolean +exists(Getget) +Test for the existence of columns in the table, as specified by the Get. + + + +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFutureResult +get(Getget) +Extracts certain cells from a given row. + + + +org.apache.hadoop.conf.Configuration +getConfiguration() +Returns the Configuration object used by this instance. + + + +TableName +getName() +Gets the fully qualified table name instance of this table. + + + +long +getOperationTimeout(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true; title="class or interface in java.util.concurrent">TimeUnitunit) +Get timeout of each operation in Table instance. + + + +long +getReadRpcTimeout(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true; title="class or interface in java.util.concurrent">TimeUnitunit) +Get timeout of each rpc read request in this Table instance. 
+ + + +long +getWriteRpcTimeout(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true; title="class or interface in java.util.concurrent">TimeUnitunit) +Get timeout of each rpc write request in this Table instance. + + + +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void +put(Putput) +Puts some data to the table. + + + +void +setOperationTimeout(longtimeout, + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true; title="class or interface in java.util.concurrent">TimeUnitunit) +Set timeout of each operation in
[33/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.html b/apidocs/src-html/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.html deleted file mode 100644 index 2c22213..000 --- a/apidocs/src-html/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.html +++ /dev/null @@ -1,198 +0,0 @@ -http://www.w3.org/TR/html4/loose.dtd;> - - -Source code - - - - -001/** -002 * -003 * Licensed to the Apache Software Foundation (ASF) under one -004 * or more contributor license agreements. See the NOTICE file -005 * distributed with this work for additional information -006 * regarding copyright ownership. The ASF licenses this file -007 * to you under the Apache License, Version 2.0 (the -008 * "License"); you may not use this file except in compliance -009 * with the License. You may obtain a copy of the License at -010 * -011 * http://www.apache.org/licenses/LICENSE-2.0 -012 * -013 * Unless required by applicable law or agreed to in writing, software -014 * distributed under the License is distributed on an "AS IS" BASIS, -015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -016 * See the License for the specific language governing permissions and -017 * limitations under the License. 
-018 */ -019package org.apache.hadoop.hbase.mob.mapreduce; -020 -021import java.io.IOException; -022 -023import org.apache.hadoop.conf.Configuration; -024import org.apache.hadoop.conf.Configured; -025import org.apache.hadoop.fs.FileSystem; -026import org.apache.hadoop.hbase.HBaseConfiguration; -027import org.apache.hadoop.hbase.HColumnDescriptor; -028import org.apache.hadoop.hbase.HTableDescriptor; -029import org.apache.hadoop.hbase.TableName; -030import org.apache.hadoop.hbase.classification.InterfaceAudience; -031import org.apache.hadoop.hbase.classification.InterfaceStability; -032import org.apache.hadoop.hbase.client.Admin; -033import org.apache.hadoop.hbase.client.Connection; -034import org.apache.hadoop.hbase.client.ConnectionFactory; -035import org.apache.hadoop.hbase.client.HBaseAdmin; -036import org.apache.hadoop.hbase.util.Bytes; -037import org.apache.hadoop.util.Tool; -038import org.apache.hadoop.util.ToolRunner; -039import org.apache.zookeeper.KeeperException; -040 -041/** -042 * The sweep tool. It deletes the mob files that are not used and merges the small mob files to -043 * bigger ones. Each run of this sweep tool only handles one column family. The runs on -044 * the same column family are mutually exclusive. And the major compaction and sweep tool on the -045 * same column family are mutually exclusive too. -046 */ -047@InterfaceAudience.Public -048@InterfaceStability.Evolving -049public class Sweeper extends Configured implements Tool { -050 -051 /** -052 * Sweeps the mob files on one column family. It deletes the unused mob files and merges -053 * the small mob files into bigger ones. -054 * @param tableName The current table name in string format. -055 * @param familyName The column family name. 
-056 * @return 0 if success, 2 if job aborted with an exception, 3 if unable to start due to -057 * other compaction,4 if mr job was unsuccessful -058 * @throws IOException -059 * @throws InterruptedException -060 * @throws ClassNotFoundException -061 * @throws KeeperException -062 * @throws ServiceException -063 */ -064 int sweepFamily(String tableName, String familyName) throws IOException, InterruptedException, -065 ClassNotFoundException, KeeperException { -066Configuration conf = getConf(); -067// make sure the target HBase exists. -068HBaseAdmin.available(conf); -069Connection connection = ConnectionFactory.createConnection(getConf()); -070Admin admin = connection.getAdmin(); -071try { -072 FileSystem fs = FileSystem.get(conf); -073 TableName tn = TableName.valueOf(tableName); -074 HTableDescriptor htd = admin.getTableDescriptor(tn); -075 HColumnDescriptor family = htd.getFamily(Bytes.toBytes(familyName)); -076 if (family == null || !family.isMobEnabled()) { -077 throw new IOException("Column family " + familyName + " is not a MOB column family"); -078 } -079 SweepJob job = new SweepJob(conf, fs); -080 // Run the sweeping -081 return job.sweep(tn, family); -082} catch (Exception e) { -083 System.err.println("Job aborted due to exception " + e); -084 return 2; // job failed -085} finally { -086 try { -087admin.close(); -088 } catch (IOException e) { -089System.out.println("Failed to close the HBaseAdmin: " + e.getMessage()); -090 } -091 try { -092connection.close(); -093 } catch (IOException e) { -094System.out.println("Failed to
[26/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/Abortable.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/Abortable.html b/devapidocs/org/apache/hadoop/hbase/Abortable.html index ceb5289..411d627 100644 --- a/devapidocs/org/apache/hadoop/hbase/Abortable.html +++ b/devapidocs/org/apache/hadoop/hbase/Abortable.html @@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab"; All Known Implementing Classes: -ConnectionImplementation, ConnectionUtils.MasterlessConnection, CoprocessorHConnection, DumpReplicationQueues.WarnOnlyAbortable, HBaseAdmin, HBaseAdmin.ThrowableAbortable, HBaseInterClusterReplicationEndpoint, HBaseReplicationEndpoint, HMaster, HMasterCommandLine.LocalHMaster, HRegionServer, RegionReplicaReplicationEndpoint, ReplicationHFileCleaner.WarnOnlyAbortable, ReplicationLogCleaner.WarnOnlyAbortable, ReplicationPeerZKImpl, ReplicationSyncUp.DummyServer, SweepJob.DummyMobAbortable, ZooKeeperKeepAliveConnection, ZooKeeperWatcher +ConnectionImplementation, ConnectionUtils.MasterlessConnection, CoprocessorHConnection, DumpReplicationQueues.WarnOnlyAbortable, HBaseAdmin, HBaseAdmin.ThrowableAbortable, HBaseInterClusterReplicationEndpoint, HBaseReplicationEndpoint, HMaster, HMasterCommandLine.LocalHMaster, HRegionServer, RegionReplicaReplicationEndpoint, ReplicationHFileCleaner.WarnOnlyAbortable, ReplicationLogCleaner.WarnOnlyAbortable, ReplicationPeerZKImpl, ReplicationSyncUp.DummyServer, ZooKeeperKeepAliveConnection, ZooKeeperWatcher http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html b/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html index 6443399..d7cabbf 100644 --- a/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html +++ b/devapidocs/org/apache/hadoop/hbase/HBaseIOException.html @@ -122,7 +122,7 @@ Direct Known Subclasses: -CallCancelledException, 
CallTimeoutException, CellScannerButNoCodecException, ClusterSchemaException, CodecException, CorruptedWALProcedureStoreException, DamagedWALException, DoNotRetryIOException, FailedServerException, FallbackDisallowedException, LeaseNotRecoveredException, PleaseHoldException, RegionException, ServiceNotRunningException, StoppedRpcClientException, TableInfoMissingException, UnexpectedStateException, WrongRowIOException +BadProcedureException, CallCancelledException, CallTimeoutException, CellScannerButNoCodecException, ClusterSchemaException, CodecException, CorruptedWALProcedureStoreException, DamagedWALException, DoNotRetryIOException, FailedServerException, FallbackDisallowedException, LeaseNotRecoveredException, PleaseHoldException, RegionException, ServiceNotRunningException, StoppedRpcClientException, TableInfoMissingException, UnexpectedStateException, WrongRowIOException http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/KeyValue.KVComparator.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.KVComparator.html b/devapidocs/org/apache/hadoop/hbase/KeyValue.KVComparator.html index 9af407e..769c286 100644 --- a/devapidocs/org/apache/hadoop/hbase/KeyValue.KVComparator.html +++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.KVComparator.html @@ -125,7 +125,7 @@ var activeTableTab = "activeTableTab"; http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -public static class KeyValue.KVComparator +public static class KeyValue.KVComparator extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements org.apache.hadoop.io.RawComparatorCell, KeyValue.SamePrefixComparatorbyte[] Compare KeyValues. 
When we compare KeyValues, we only compare the Key @@ -446,7 +446,7 @@ implements org.apache.hadoop.io.RawComparator KVComparator -publicKVComparator() +publicKVComparator() Deprecated. @@ -464,7 +464,7 @@ implements org.apache.hadoop.io.RawComparator getLegacyKeyComparatorName -publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetLegacyKeyComparatorName() +publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetLegacyKeyComparatorName() Deprecated. The HFileV2 file format's trailer contains this class name. We reinterpret this and instantiate the appropriate comparator. @@ -481,7 +481,7 @@ implements org.apache.hadoop.io.RawComparator compare -publicintcompare(byte[]l,
[51/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14. Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/f07ee53f Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/f07ee53f Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/f07ee53f Branch: refs/heads/asf-site Commit: f07ee53f300b8457f08319a9e1b546b53a0a69a9 Parents: 344fa32 Author: jenkinsAuthored: Mon Oct 17 14:55:12 2016 + Committer: Dima Spivak Committed: Mon Oct 17 14:20:11 2016 -0700 -- acid-semantics.html | 4 +- apache_hbase_reference_guide.pdf| 722473 +--- apache_hbase_reference_guide.pdfmarks | 14 +- apidocs/allclasses-frame.html | 4 +- apidocs/allclasses-noframe.html | 4 +- apidocs/constant-values.html| 19 + apidocs/index-all.html |144 +- .../org/apache/hadoop/hbase/ProcedureInfo.html | 21 +- .../hadoop/hbase/class-use/HRegionLocation.html | 13 + .../hadoop/hbase/class-use/TableName.html | 26 +- .../hbase/class-use/TableNotFoundException.html | 2 +- .../org/apache/hadoop/hbase/client/Append.html | 4 +- .../hadoop/hbase/client/AsyncConnection.html|294 + .../apache/hadoop/hbase/client/AsyncTable.html |454 + .../hbase/client/AsyncTableRegionLocator.html |278 + .../apache/hadoop/hbase/client/Attributes.html | 4 +- .../BufferedMutator.ExceptionListener.html | 4 +- .../hadoop/hbase/client/BufferedMutator.html| 36 +- .../hadoop/hbase/client/ConnectionFactory.html |167 +- apidocs/org/apache/hadoop/hbase/client/Get.html |131 +- .../org/apache/hadoop/hbase/client/Query.html |111 +- .../org/apache/hadoop/hbase/client/Scan.html|211 +- .../hadoop/hbase/client/class-use/Admin.html| 2 +- .../hbase/client/class-use/AsyncConnection.html |183 + .../hbase/client/class-use/AsyncTable.html |169 + .../class-use/AsyncTableRegionLocator.html |169 + .../hadoop/hbase/client/class-use/Delete.html | 6 + .../hadoop/hbase/client/class-use/Get.html | 28 +- .../hadoop/hbase/client/class-use/Put.html | 8 +- 
.../hadoop/hbase/client/class-use/Query.html| 7 + .../hbase/client/class-use/RegionLocator.html | 2 +- .../hadoop/hbase/client/class-use/Result.html | 15 + .../hadoop/hbase/client/class-use/Scan.html | 5 +- .../hadoop/hbase/client/class-use/Table.html| 4 +- .../hadoop/hbase/client/package-frame.html | 3 + .../hadoop/hbase/client/package-summary.html| 34 +- .../hadoop/hbase/client/package-tree.html | 3 + .../apache/hadoop/hbase/client/package-use.html | 81 +- .../org/apache/hadoop/hbase/filter/Filter.html | 2 +- .../apache/hadoop/hbase/filter/FilterList.html |262 +- .../hadoop/hbase/filter/class-use/Filter.html | 6 +- .../hadoop/hbase/filter/package-tree.html | 2 +- .../hbase/mapreduce/LoadIncrementalHFiles.html | 68 +- .../hbase/mob/compactions/package-summary.html | 4 +- .../hbase/mob/compactions/package-tree.html | 4 +- .../hadoop/hbase/mob/mapreduce/Sweeper.html |333 - .../hbase/mob/mapreduce/class-use/Sweeper.html |125 - .../hbase/mob/mapreduce/package-frame.html | 20 - .../hbase/mob/mapreduce/package-summary.html|145 - .../hbase/mob/mapreduce/package-tree.html |142 - .../hadoop/hbase/mob/mapreduce/package-use.html |125 - .../hadoop/hbase/namespace/package-summary.html | 4 +- .../hadoop/hbase/namespace/package-tree.html| 4 +- .../hadoop/hbase/security/class-use/User.html | 9 +- .../org/apache/hadoop/hbase/util/Counter.html | 36 +- .../hadoop/hbase/util/class-use/Pair.html | 11 +- apidocs/overview-frame.html | 1 - apidocs/overview-summary.html | 52 +- apidocs/overview-tree.html | 8 +- apidocs/package-list| 1 - .../org/apache/hadoop/hbase/ProcedureInfo.html | 2 +- .../org/apache/hadoop/hbase/client/Admin.html | 2 +- .../hadoop/hbase/client/AsyncConnection.html|134 + .../apache/hadoop/hbase/client/AsyncTable.html |198 + .../hbase/client/AsyncTableRegionLocator.html |132 + .../BufferedMutator.ExceptionListener.html | 26 +- .../hadoop/hbase/client/BufferedMutator.html| 26 +- .../hadoop/hbase/client/ConnectionFactory.html |461 +- 
.../org/apache/hadoop/hbase/client/Get.html |783 +- ...ableMultiplexer.HTableMultiplexerStatus.html |410 +- .../hadoop/hbase/client/HTableMultiplexer.html |410 +-
[15/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.RowCheckerHost.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.RowCheckerHost.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.RowCheckerHost.html index a8ade35..3087937 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.RowCheckerHost.html +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.RowCheckerHost.html @@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab"; -static class AsyncProcess.RowCheckerHost +static class AsyncProcess.RowCheckerHost extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object Collect all advices from checkers and make the final decision. @@ -211,7 +211,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? checkers -private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListAsyncProcess.RowChecker checkers +private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListAsyncProcess.RowChecker checkers @@ -220,7 +220,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? isEnd -privateboolean isEnd +privateboolean isEnd @@ -237,7 +237,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? RowCheckerHost -RowCheckerHost(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListAsyncProcess.RowCheckercheckers) +RowCheckerHost(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListAsyncProcess.RowCheckercheckers) @@ -254,7 +254,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? 
reset -voidreset() +voidreset() throws http://docs.oracle.com/javase/8/docs/api/java/io/InterruptedIOException.html?is-external=true; title="class or interface in java.io">InterruptedIOException Throws: @@ -268,7 +268,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? canTakeOperation -AsyncProcess.RowChecker.ReturnCodecanTakeOperation(HRegionLocationloc, +AsyncProcess.RowChecker.ReturnCodecanTakeOperation(HRegionLocationloc, longrowSize) http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.SubmittedSizeChecker.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.SubmittedSizeChecker.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.SubmittedSizeChecker.html index e29304a..b6ea032 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.SubmittedSizeChecker.html +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.SubmittedSizeChecker.html @@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab"; -static class AsyncProcess.SubmittedSizeChecker +static class AsyncProcess.SubmittedSizeChecker extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements AsyncProcess.RowChecker limit the heapsize of total submitted data. 
@@ -243,7 +243,7 @@ implements maxHeapSizeSubmit -private finallong maxHeapSizeSubmit +private finallong maxHeapSizeSubmit @@ -252,7 +252,7 @@ implements heapSize -privatelong heapSize +privatelong heapSize @@ -269,7 +269,7 @@ implements SubmittedSizeChecker -SubmittedSizeChecker(longmaxHeapSizeSubmit) +SubmittedSizeChecker(longmaxHeapSizeSubmit) @@ -286,7 +286,7 @@ implements canTakeOperation -publicAsyncProcess.RowChecker.ReturnCodecanTakeOperation(HRegionLocationloc, +publicAsyncProcess.RowChecker.ReturnCodecanTakeOperation(HRegionLocationloc, longrowSize) Specified by: @@ -300,7 +300,7 @@ implements notifyFinal -publicvoidnotifyFinal(AsyncProcess.RowChecker.ReturnCodecode, +publicvoidnotifyFinal(AsyncProcess.RowChecker.ReturnCodecode, HRegionLocationloc, longrowSize) Description copied from interface:AsyncProcess.RowChecker @@ -319,7 +319,7 @@ implements reset -publicvoidreset() +publicvoidreset() Description copied from interface:AsyncProcess.RowChecker Reset the inner state. http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.TaskCountChecker.html -- diff --git
[48/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apache_hbase_reference_guide.pdfmarks -- diff --git a/apache_hbase_reference_guide.pdfmarks b/apache_hbase_reference_guide.pdfmarks index 43c257d..1aa9061 100644 --- a/apache_hbase_reference_guide.pdfmarks +++ b/apache_hbase_reference_guide.pdfmarks @@ -1,9 +1,9 @@ -[ /Title +[ /Title (Apache HBase Reference Guide) /Author (Apache HBase Team) - /Subject null - /Keywords null - /ModDate (D:20161009074600) - /CreationDate (D:20161009074600) - /Creator (Asciidoctor PDF 1.5.0.alpha.11, based on Prawn 1.3.0) - /Producer null + /Subject () + /Keywords () + /ModDate (D:20161017144547) + /CreationDate (D:20161017144547) + /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1) + /Producer () /DOCINFO pdfmark http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/allclasses-frame.html -- diff --git a/apidocs/allclasses-frame.html b/apidocs/allclasses-frame.html index 492f79c..cb4fc54 100644 --- a/apidocs/allclasses-frame.html +++ b/apidocs/allclasses-frame.html @@ -15,6 +15,9 @@ AccessDeniedException Admin Append +AsyncConnection +AsyncTable +AsyncTableRegionLocator Attributes AuthUtil BadAuthException @@ -306,7 +309,6 @@ StructBuilder StructIterator SubstringComparator -Sweeper Table TableExistsException TableInfoMissingException http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/allclasses-noframe.html -- diff --git a/apidocs/allclasses-noframe.html b/apidocs/allclasses-noframe.html index 0efd518..053863f 100644 --- a/apidocs/allclasses-noframe.html +++ b/apidocs/allclasses-noframe.html @@ -15,6 +15,9 @@ AccessDeniedException Admin Append +AsyncConnection +AsyncTable +AsyncTableRegionLocator Attributes AuthUtil BadAuthException @@ -306,7 +309,6 @@ StructBuilder StructIterator SubstringComparator -Sweeper Table TableExistsException TableInfoMissingException http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/constant-values.html -- diff --git 
a/apidocs/constant-values.html b/apidocs/constant-values.html index 07faf6f..54bf2e4 100644 --- a/apidocs/constant-values.html +++ b/apidocs/constant-values.html @@ -2757,6 +2757,25 @@ +org.apache.hadoop.hbase.client.ConnectionFactory + +Modifier and Type +Constant Field +Value + + + + + +publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String +HBASE_CLIENT_ASYNC_CONNECTION_IMPL +"hbase.client.async.connection.impl" + + + + + + org.apache.hadoop.hbase.client.HTableMultiplexer Modifier and Type http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/index-all.html -- diff --git a/apidocs/index-all.html b/apidocs/index-all.html index 0a9fd51..da0c7c8 100644 --- a/apidocs/index-all.html +++ b/apidocs/index-all.html @@ -307,6 +307,8 @@ Delete all columns of the specified family with a timestamp equal to the specified timestamp. +addFilter(ListFilter) - Method in class org.apache.hadoop.hbase.filter.FilterList + addFilter(Filter) - Method in class org.apache.hadoop.hbase.filter.FilterList Add a filter. @@ -487,6 +489,18 @@ assign(byte[]) - Method in interface org.apache.hadoop.hbase.client.Admin +AsyncConnection - Interface in org.apache.hadoop.hbase.client + +The asynchronous version of Connection. + +AsyncTable - Interface in org.apache.hadoop.hbase.client + +The asynchronous version of Table. + +AsyncTableRegionLocator - Interface in org.apache.hadoop.hbase.client + +The asynchronous version of RegionLocator. 
+ ATTRIBUTE_SEPERATOR_CONF_KEY - Static variable in class org.apache.hadoop.hbase.mapreduce.ImportTsv Attributes - Interface in org.apache.hadoop.hbase.client @@ -1897,6 +1911,19 @@ CREATE_TABLE_CONF_KEY - Static variable in class org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles +createAsyncConnection() - Static method in class org.apache.hadoop.hbase.client.ConnectionFactory + +Call ConnectionFactory.createAsyncConnection(Configuration) using default HBaseConfiguration. + +createAsyncConnection(Configuration) - Static method in class org.apache.hadoop.hbase.client.ConnectionFactory + +Call ConnectionFactory.createAsyncConnection(Configuration, User) using the given conf and a + User object created by UserProvider. + +createAsyncConnection(Configuration, User) - Static method in class org.apache.hadoop.hbase.client.ConnectionFactory + +Create a new AsyncConnection instance using the passed conf and user. + createCell(byte[], byte[], byte[], long, byte, byte[]) - Static method in class
[45/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/client/Scan.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/Scan.html b/apidocs/org/apache/hadoop/hbase/client/Scan.html index 92fb615..3e29526 100644 --- a/apidocs/org/apache/hadoop/hbase/client/Scan.html +++ b/apidocs/org/apache/hadoop/hbase/client/Scan.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10}; +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10}; var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -220,7 +220,7 @@ extends Fields inherited from classorg.apache.hadoop.hbase.client.Query -colFamTimeRangeMap, consistency, filter, targetReplicaId +colFamTimeRangeMap, consistency, filter, loadColumnFamiliesOnDemand, targetReplicaId @@ -302,181 +302,169 @@ extends boolean -doLoadColumnFamiliesOnDemand() -Get the logical value indicating 
whether on-demand CF loading should be allowed. - - - -boolean getAllowPartialResults() - + int getBatch() - + boolean getCacheBlocks() Get whether blocks should be cached for this Scan. - + int getCaching() - + byte[][] getFamilies() - + http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true; title="class or interface in java.util">NavigableSetbyte[] getFamilyMap() Getting the familyMap - + Filter getFilter() - + http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object getFingerprint() Compile the table and column family (i.e. - -http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true; title="class or interface in java.lang">Boolean -getLoadColumnFamiliesOnDemandValue() -Get the raw loadColumnFamiliesOnDemand setting; if it's not set, can be null. - - - + long getMaxResultSize() - + int getMaxResultsPerColumnFamily() - + int getMaxVersions() - + int getRowOffsetPerColumnFamily() Method for retrieving the scan's offset per row per column family (#kvs to be skipped) - + org.apache.hadoop.hbase.client.metrics.ScanMetrics getScanMetrics() - + byte[] getStartRow() - + byte[] getStopRow() - + TimeRange getTimeRange() - + boolean hasFamilies() - + boolean hasFilter() - + http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true; title="class or interface in java.lang">Boolean isAsyncPrefetch() - + boolean isGetScan() - + boolean isRaw() - + boolean isReversed() Get whether this scan is a reversed one. 
- + boolean isScanMetricsEnabled() - + boolean isSmall() Get whether this scan is a small scan - + int numFamilies() - + Scan setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,org.apache.hadoop.hbase.security.access.Permissionperms) - + Scan setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringuser, org.apache.hadoop.hbase.security.access.Permissionperms) - + Scan setAllowPartialResults(booleanallowPartialResults) Setting whether the caller wants to see the partial results that may be returned from the
[44/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableRegionLocator.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableRegionLocator.html b/apidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableRegionLocator.html new file mode 100644 index 000..e95ecf0 --- /dev/null +++ b/apidocs/org/apache/hadoop/hbase/client/class-use/AsyncTableRegionLocator.html @@ -0,0 +1,169 @@ +http://www.w3.org/TR/html4/loose.dtd;> + + + + + +Uses of Interface org.apache.hadoop.hbase.client.AsyncTableRegionLocator (Apache HBase 2.0.0-SNAPSHOT API) + + + + + + + +JavaScript is disabled on your browser. + + + + + +Skip navigation links + + + + +Overview +Package +Class +Use +Tree +Deprecated +Index +Help + + + + +Prev +Next + + +Frames +NoFrames + + +AllClasses + + + + + + + + + + +Uses of Interfaceorg.apache.hadoop.hbase.client.AsyncTableRegionLocator + + + + + +Packages that use AsyncTableRegionLocator + +Package +Description + + + +org.apache.hadoop.hbase.client + +Provides HBase Client + + + + + + + + + + +Uses of AsyncTableRegionLocator in org.apache.hadoop.hbase.client + +Methods in org.apache.hadoop.hbase.client that return AsyncTableRegionLocator + +Modifier and Type +Method and Description + + + +AsyncTableRegionLocator +AsyncConnection.getRegionLocator(TableNametableName) +Retrieve a AsyncRegionLocator implementation to inspect region information on a table. + + + + + + + + + + + + + +Skip navigation links + + + + +Overview +Package +Class +Use +Tree +Deprecated +Index +Help + + + + +Prev +Next + + +Frames +NoFrames + + +AllClasses + + + + + + + + + +Copyright 20072016 http://www.apache.org/;>The Apache Software Foundation. All rights reserved. 
+ + http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/client/class-use/Delete.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Delete.html b/apidocs/org/apache/hadoop/hbase/client/class-use/Delete.html index 29f0c59..5510de2 100644 --- a/apidocs/org/apache/hadoop/hbase/client/class-use/Delete.html +++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Delete.html @@ -255,6 +255,12 @@ Deletes the specified cells/row. + +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true; title="class or interface in java.lang">Void +AsyncTable.delete(Deletedelete) +Deletes the specified cells/row. + + http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html -- diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html b/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html index 68e0643..00cda3f 100644 --- a/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html +++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Get.html @@ -181,33 +181,37 @@ Get +Get.setLoadColumnFamiliesOnDemand(booleanvalue) + + +Get Get.setMaxResultsPerColumnFamily(intlimit) Set the maximum number of values to return per row per Column Family - + Get Get.setMaxVersions() Get all available versions. - + Get Get.setMaxVersions(intmaxVersions) Get up to the specified number of versions of each column. - + Get Get.setReplicaId(intId) - + Get Get.setRowOffsetPerColumnFamily(intoffset) Set offset for the row per Column Family. - + Get Get.setTimeRange(longminStamp, longmaxStamp) @@ -215,7 +219,7 @@ [minStamp, maxStamp). - + Get Get.setTimeStamp(longtimestamp) Get versions of columns with the specified timestamp. 
@@ -237,11 +241,23 @@ +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true; title="class or
[36/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.Operator.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.Operator.html b/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.Operator.html index 8b52a04..61a5457 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.Operator.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.Operator.html @@ -60,7 +60,7 @@ 052 */ 053@InterfaceAudience.Public 054@InterfaceStability.Stable -055final public class FilterList extends Filter { +055final public class FilterList extends FilterBase { 056 /** set operator */ 057 @InterfaceAudience.Public 058 @InterfaceStability.Stable @@ -73,7 +73,7 @@ 065 066 private static final int MAX_LOG_FILTERS = 5; 067 private Operator operator = Operator.MUST_PASS_ALL; -068 private ListFilter filters = new ArrayListFilter(); +068 private final ListFilter filters; 069 private Filter seekHintFilter = null; 070 071 /** Reference Cell used by {@link #transformCell(Cell)} for validation purpose. */ @@ -91,423 +91,476 @@ 083 /** 084 * Constructor that takes a set of {@link Filter}s. The default operator 085 * MUST_PASS_ALL is assumed. -086 * +086 * All filters are cloned to internal list. 087 * @param rowFilters list of filters 088 */ 089 public FilterList(final ListFilter rowFilters) { -090if (rowFilters instanceof ArrayList) { -091 this.filters = rowFilters; -092} else { -093 this.filters = new ArrayListFilter(rowFilters); -094} -095 } -096 -097 /** -098 * Constructor that takes a var arg number of {@link Filter}s. The fefault operator -099 * MUST_PASS_ALL is assumed. -100 * @param rowFilters -101 */ -102 public FilterList(final Filter... rowFilters) { -103this.filters = new ArrayListFilter(Arrays.asList(rowFilters)); -104 } -105 -106 /** -107 * Constructor that takes an operator. -108 * -109 * @param operator Operator to process filter set with. 
-110 */ -111 public FilterList(final Operator operator) { -112this.operator = operator; -113 } -114 -115 /** -116 * Constructor that takes a set of {@link Filter}s and an operator. -117 * -118 * @param operator Operator to process filter set with. -119 * @param rowFilters Set of row filters. -120 */ -121 public FilterList(final Operator operator, final ListFilter rowFilters) { -122this.filters = new ArrayListFilter(rowFilters); -123this.operator = operator; -124 } -125 -126 /** -127 * Constructor that takes a var arg number of {@link Filter}s and an operator. -128 * -129 * @param operator Operator to process filter set with. -130 * @param rowFilters Filters to use -131 */ -132 public FilterList(final Operator operator, final Filter... rowFilters) { -133this.filters = new ArrayListFilter(Arrays.asList(rowFilters)); -134this.operator = operator; -135 } -136 -137 /** -138 * Get the operator. -139 * -140 * @return operator -141 */ -142 public Operator getOperator() { -143return operator; -144 } -145 -146 /** -147 * Get the filters. -148 * -149 * @return filters -150 */ -151 public ListFilter getFilters() { -152return filters; -153 } -154 -155 /** -156 * Add a filter. -157 * -158 * @param filter another filter -159 */ -160 public void addFilter(Filter filter) { -161if (this.isReversed() != filter.isReversed()) { -162 throw new IllegalArgumentException( -163 "Filters in the list must have the same reversed flag, this.reversed=" -164 + this.isReversed()); -165} -166this.filters.add(filter); -167 } -168 -169 @Override -170 public void reset() throws IOException { -171int listize = filters.size(); -172for (int i = 0; i listize; i++) { -173 filters.get(i).reset(); -174} -175seekHintFilter = null; -176 } -177 -178 @Override -179 public boolean filterRowKey(byte[] rowKey, int offset, int length) throws IOException { -180boolean flag = (this.operator == Operator.MUST_PASS_ONE) ? 
true : false; -181int listize = filters.size(); -182for (int i = 0; i listize; i++) { -183 Filter filter = filters.get(i); -184 if (this.operator == Operator.MUST_PASS_ALL) { -185if (filter.filterAllRemaining() || -186filter.filterRowKey(rowKey, offset, length)) { -187 flag = true; -188} -189 } else if (this.operator == Operator.MUST_PASS_ONE) { -190if (!filter.filterAllRemaining() -191!filter.filterRowKey(rowKey, offset, length)) { -192 flag = false; -193} -194 } -195} -196return flag; -197 } -198 -199 @Override -200 public boolean filterRowKey(Cell firstRowCell) throws IOException { -201boolean flag = (this.operator ==
[23/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html b/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html index dd14b89..7076ece 100644 --- a/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html +++ b/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html @@ -171,9 +171,10 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? static KeyValue -copyCellTo(Cellcell, +copyCellTo(Cellcell, byte[]buf, - intoffset) + intoffset, + intlen) Write the given cell in KeyValue serialization format into the given buf and return a new KeyValue object around that. @@ -401,7 +402,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html? -static void +static int oswrite(Cellcell, http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true; title="class or interface in java.io">OutputStreamout, booleanwithTags) @@ -1136,25 +1137,26 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/L oswrite -public staticvoidoswrite(Cellcell, - http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true; title="class or interface in java.io">OutputStreamout, - booleanwithTags) -throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException +public staticintoswrite(Cellcell, + http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true; title="class or interface in java.io">OutputStreamout, + booleanwithTags) + throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Throws: http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException - + copyCellTo -public staticKeyValuecopyCellTo(Cellcell, +public 
staticKeyValuecopyCellTo(Cellcell, byte[]buf, - intoffset) + intoffset, + intlen) Write the given cell in KeyValue serialization format into the given buf and return a new KeyValue object around that. http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/ProcedureInfo.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/ProcedureInfo.html b/devapidocs/org/apache/hadoop/hbase/ProcedureInfo.html index 901474e..b0e126a 100644 --- a/devapidocs/org/apache/hadoop/hbase/ProcedureInfo.html +++ b/devapidocs/org/apache/hadoop/hbase/ProcedureInfo.html @@ -283,7 +283,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Cloneable hasClientAckTime() -private boolean +boolean hasOwner() @@ -519,7 +519,7 @@ public hasOwner -privatebooleanhasOwner() +publicbooleanhasOwner() http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html index 9deb4da..0361c8c 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html @@ -109,40 +109,36 @@ -org.apache.hadoop.hbase.mob.mapreduce - - - org.apache.hadoop.hbase.procedure.flush - + org.apache.hadoop.hbase.regionserver - + org.apache.hadoop.hbase.regionserver.snapshot - + org.apache.hadoop.hbase.replication Multi Cluster Replication - + org.apache.hadoop.hbase.replication.master - + org.apache.hadoop.hbase.replication.regionserver - + org.apache.hadoop.hbase.util.hbck - + org.apache.hadoop.hbase.zookeeper @@ -441,24 +437,6 @@ - - - -Uses of Abortable in org.apache.hadoop.hbase.mob.mapreduce - -Classes in org.apache.hadoop.hbase.mob.mapreduce that implement Abortable - -Modifier and Type -Class and Description - - - -static class -SweepJob.DummyMobAbortable - - - -
[10/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.html new file mode 100644 index 000..5a41be6 --- /dev/null +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncSingleRequestRpcRetryingCaller.html @@ -0,0 +1,637 @@ +http://www.w3.org/TR/html4/loose.dtd;> + + + + + +AsyncSingleRequestRpcRetryingCaller (Apache HBase 2.0.0-SNAPSHOT API) + + + + + +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":9}; +var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; +var altColor = "altColor"; +var rowColor = "rowColor"; +var tableTab = "tableTab"; +var activeTableTab = "activeTableTab"; + + +JavaScript is disabled on your browser. + + + + + +Skip navigation links + + + + +Overview +Package +Class +Use +Tree +Deprecated +Index +Help + + + + +PrevClass +NextClass + + +Frames +NoFrames + + +AllClasses + + + + + + + +Summary: +Nested| +Field| +Constr| +Method + + +Detail: +Field| +Constr| +Method + + + + + + + + +org.apache.hadoop.hbase.client +Class AsyncSingleRequestRpcRetryingCallerT + + + +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">java.lang.Object + + +org.apache.hadoop.hbase.client.AsyncSingleRequestRpcRetryingCallerT + + + + + + + + +@InterfaceAudience.Private +class AsyncSingleRequestRpcRetryingCallerT +extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object +Retry caller for a single request, such as get, put, delete, etc. 
+ + + + + + + + + + + +Nested Class Summary + +Nested Classes + +Modifier and Type +Class and Description + + +static interface +AsyncSingleRequestRpcRetryingCaller.CallableT + + + + + + + + + +Field Summary + +Fields + +Modifier and Type +Field and Description + + +private AsyncSingleRequestRpcRetryingCaller.CallableT +callable + + +private AsyncConnectionImpl +conn + + +private HBaseRpcController +controller + + +private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListRetriesExhaustedException.ThrowableWithExtraContext +exceptions + + +private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFutureT +future + + +private static org.apache.commons.logging.Log +LOG + + +private int +maxAttempts + + +private long +operationTimeoutNs + + +private long +pauseNs + + +private io.netty.util.HashedWheelTimer +retryTimer + + +private byte[] +row + + +private long +rpcTimeoutNs + + +private int +startLogErrorsCnt + + +private long +startNs + + +private TableName +tableName + + +private int +tries + + + + + + + + + +Constructor Summary + +Constructors + +Constructor and Description + + +AsyncSingleRequestRpcRetryingCaller(io.netty.util.HashedWheelTimerretryTimer, + AsyncConnectionImplconn, + TableNametableName, + byte[]row, + AsyncSingleRequestRpcRetryingCaller.CallableTcallable, + longpauseNs, + intmaxRetries, + longoperationTimeoutNs, + longrpcTimeoutNs, + intstartLogErrorsCnt) + + + + + + + + + +Method Summary + +All MethodsStatic MethodsInstance MethodsConcrete Methods + +Modifier and Type +Method and Description + + +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFutureT +call() + + +private void +call(HRegionLocationloc) + + +private void +completeExceptionally() + + 
+private long +elapsedMs() + + +private void +locateThenCall() + + +private void +onError(http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true; title="class or interface in java.lang">Throwableerror, +
[13/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html index 1a50c3f..c68b2f9 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html @@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab"; -private final class AsyncRequestFutureImpl.ReplicaCallIssuingRunnable +private final class AsyncRequestFutureImpl.ReplicaCallIssuingRunnable extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true; title="class or interface in java.lang">Runnable Runnable (that can be submitted to thread pool) that waits for when it's time @@ -229,7 +229,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. startTime -private finallong startTime +private finallong startTime @@ -238,7 +238,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. initialActions -private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListActionRow initialActions +private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListActionRow initialActions @@ -255,7 +255,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. 
ReplicaCallIssuingRunnable -publicReplicaCallIssuingRunnable(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListActionRowinitialActions, +publicReplicaCallIssuingRunnable(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListActionRowinitialActions, longstartTime) @@ -273,7 +273,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. run -publicvoidrun() +publicvoidrun() Specified by: http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--; title="class or interface in java.lang">runin interfacehttp://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true; title="class or interface in java.lang">Runnable @@ -286,7 +286,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. addReplicaActions -privatevoidaddReplicaActions(intindex, +privatevoidaddReplicaActions(intindex, http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapServerName,MultiActionRowactionsByServer, http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListActionRowunknownReplicaActions) Add replica actions to action map by server. @@ -303,7 +303,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable. 
addReplicaActionsAgain -privatevoidaddReplicaActionsAgain(ActionRowaction, +privatevoidaddReplicaActionsAgain(ActionRowaction, http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapServerName,MultiActionRowactionsByServer) http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html index 77a7c12..4952610 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.ReplicaResultState.html @@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab"; -private static class AsyncRequestFutureImpl.ReplicaResultState +private static class AsyncRequestFutureImpl.ReplicaResultState extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object Sync point for calls to multiple replicas for the same user request (Get). Created and put in the results array (we assume
[12/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html index daa4efb..73f6f31 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.html @@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -class AsyncRequestFutureImplCResult +class AsyncRequestFutureImplCResult extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements AsyncRequestFuture The context, and return value, for a single submit/submitAll call. @@ -205,33 +205,33 @@ implements currentCallable -private int -currentCallTotalTimeout - - private BatchErrors errors - + private ConnectionImplementation.ServerErrorTracker errorsByServer - + private boolean hasAnyReplicaGets - + private http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">MapServerName,http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true; title="class or interface in java.lang">Long heapSizesByServer - + private static org.apache.commons.logging.Log LOG - + private long nonceGroup + +private int +operationTimeout + private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorService pool @@ -255,9 +255,17 @@ implements +private int +rpcTimeout + + private TableName tableName + +private RetryingTimeTracker +tracker + @@ -273,7 +281,7 @@ implements Constructor and Description 
-AsyncRequestFutureImpl(TableNametableName, +AsyncRequestFutureImpl(TableNametableName, http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListActionRowactions, longnonceGroup, http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorServicepool, @@ -281,7 +289,8 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object[]results, Batch.CallbackCResultcallback, CancellableRegionServerCallablecallable, - inttimeout, + intoperationTimeout, + intrpcTimeout, AsyncProcessasyncProcess) @@ -537,7 +546,16 @@ implements LOG -private static finalorg.apache.commons.logging.Log LOG +private static finalorg.apache.commons.logging.Log LOG + + + + + + + +tracker +privateRetryingTimeTracker tracker @@ -546,7 +564,7 @@ implements callback -private finalBatch.CallbackCResult callback +private finalBatch.CallbackCResult callback @@ -555,7 +573,7 @@ implements errors -private finalBatchErrors errors +private finalBatchErrors errors @@ -564,7 +582,7 @@ implements errorsByServer -private finalConnectionImplementation.ServerErrorTracker errorsByServer +private finalConnectionImplementation.ServerErrorTracker errorsByServer @@ -573,7 +591,7 @@ implements pool -private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorService pool +private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorService pool @@ -582,7 +600,7 @@ implements callsInProgress -private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true; title="class or interface in java.util">SetCancellableRegionServerCallable 
callsInProgress +private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true; title="class or interface in java.util">SetCancellableRegionServerCallable callsInProgress @@ -591,7 +609,7 @@ implements tableName -private finalTableName tableName +private finalTableName tableName @@ -600,7 +618,7 @@ implements actionsInProgress -private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true; title="class or interface in java.util.concurrent.atomic">AtomicLong actionsInProgress +private
[11/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html new file mode 100644 index 000..0c78430 --- /dev/null +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder.html @@ -0,0 +1,449 @@ +http://www.w3.org/TR/html4/loose.dtd;> + + + + + +AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder (Apache HBase 2.0.0-SNAPSHOT API) + + + + + +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10}; +var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; +var altColor = "altColor"; +var rowColor = "rowColor"; +var tableTab = "tableTab"; +var activeTableTab = "activeTableTab"; + + +JavaScript is disabled on your browser. 
+ + + + + +Skip navigation links + + + + +Overview +Package +Class +Use +Tree +Deprecated +Index +Help + + + + +PrevClass +NextClass + + +Frames +NoFrames + + +AllClasses + + + + + + + +Summary: +Nested| +Field| +Constr| +Method + + +Detail: +Field| +Constr| +Method + + + + + + + + +org.apache.hadoop.hbase.client +Class AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilderT + + + +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">java.lang.Object + + +org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilderT + + + + + + + +Enclosing class: +AsyncRpcRetryingCallerFactory + + + +public class AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilderT +extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object + + + + + + + + + + + +Field Summary + +Fields + +Modifier and Type +Field and Description + + +private AsyncSingleRequestRpcRetryingCaller.CallableT +callable + + +private long +operationTimeoutNs + + +private byte[] +row + + +private long +rpcTimeoutNs + + +private TableName +tableName + + + + + + + + + +Constructor Summary + +Constructors + +Constructor and Description + + +SingleRequestCallerBuilder() + + + + + + + + + +Method Summary + +All MethodsInstance MethodsConcrete Methods + +Modifier and Type +Method and Description + + +AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilderT +action(AsyncSingleRequestRpcRetryingCaller.CallableTcallable) + + +AsyncSingleRequestRpcRetryingCallerT +build() + + +http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true; title="class or interface in java.util.concurrent">CompletableFutureT +call() +Shortcut for build().call() + + + +AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilderT +operationTimeout(longoperationTimeout, 
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true; title="class or interface in java.util.concurrent">TimeUnitunit) + + +AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilderT +row(byte[]row) + + +AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilderT +rpcTimeout(longrpcTimeout, + http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true; title="class or interface in java.util.concurrent">TimeUnitunit) + + +AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilderT +table(TableNametableName) + + + + + + +Methods inherited from classjava.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--; title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-; title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--; title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--; title="class or interface in java.lang">getClass,
[20/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html index c24f9b9..03c8b00 100644 --- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html +++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html @@ -133,52 +133,48 @@ -org.apache.hadoop.hbase.mob.mapreduce - - - org.apache.hadoop.hbase.regionserver - + org.apache.hadoop.hbase.regionserver.handler - + org.apache.hadoop.hbase.replication Multi Cluster Replication - + org.apache.hadoop.hbase.replication.regionserver - + org.apache.hadoop.hbase.rsgroup - + org.apache.hadoop.hbase.security.access - + org.apache.hadoop.hbase.tmpl.master - + org.apache.hadoop.hbase.tool - + org.apache.hadoop.hbase.util - + org.apache.hadoop.hbase.wal - + org.apache.hadoop.hbase.zookeeper @@ -839,6 +835,10 @@ intstopped) +private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.Interface +AsyncConnectionImpl.createRegionServerStub(ServerNameserverName) + + static ClusterConnection ConnectionUtils.createShortCircuitConnection(org.apache.hadoop.conf.Configurationconf, http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true; title="class or interface in java.util.concurrent">ExecutorServicepool, @@ -850,59 +850,63 @@ deserialization, networking, etc..) when talking to a local server. - + protected void AsyncProcess.decTaskCounters(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">Collectionbyte[]regions, ServerNamesn) Decrements the counters for a given region and the region server. 
- + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface ClusterConnection.getAdmin(ServerNameserverName) Establishes a connection to the region server at the specified address. - + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface ConnectionImplementation.getAdmin(ServerNameserverName) - + private http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true; title="class or interface in java.lang">Long AsyncRequestFutureImpl.getBackoff(ServerNameserver, byte[]regionName) - + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface ClusterConnection.getClient(ServerNameserverName) Establishes a connection to the region server at the specified address, and returns a region client protocol. - + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface ConnectionImplementation.getClient(ServerNamesn) - + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface CoprocessorHConnection.getClient(ServerNameserverName) - + private http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">Collection? extends http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true; title="class or interface in java.lang">Runnable AsyncRequestFutureImpl.getNewMultiActionRunnable(ServerNameserver, MultiActionRowmultiAction, intnumAttempt) - + http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfo Admin.getOnlineRegions(ServerNamesn) Get all the online regions on a region server. 
- + http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListHRegionInfo HBaseAdmin.getOnlineRegions(ServerNamesn) + +(package private) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.Interface +AsyncConnectionImpl.getRegionServerStub(ServerNameserverName) + (package private) ServerStatistics ServerStatisticTracker.getServerStatsForTesting(ServerNameserver) @@ -912,39 +916,47 @@ ServerStatisticTracker.getStats(ServerNameserver) +(package private) static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String +ConnectionUtils.getStubKey(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringserviceName, + ServerNameserverName, + booleanhostnameCanChange) +Get a unique key for the rpc stub to the given server. + + + protected void PreemptiveFastFailInterceptor.handleFailureToServer(ServerNameserverName,
[02/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html b/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html index 0a7f43d..e65933f 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html +++ b/devapidocs/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.html @@ -106,9 +106,6 @@ var activeTableTab = "activeTableTab"; org.apache.hadoop.hbase.client.ClientServiceCallableT -org.apache.hadoop.hbase.client.CancellableRegionServerCallableT - - org.apache.hadoop.hbase.client.NoncedRegionServerCallableT @@ -117,8 +114,6 @@ var activeTableTab = "activeTableTab"; - - @@ -128,18 +123,18 @@ var activeTableTab = "activeTableTab"; All Implemented Interfaces: -Cancellable, RetryingCallableT +RetryingCallableT @InterfaceAudience.Private public abstract class NoncedRegionServerCallableT -extends CancellableRegionServerCallableT +extends ClientServiceCallableT Implementations make an rpc call against a RegionService via a protobuf Service. - Implement #rpcCall(RpcController) and then call CancellableRegionServerCallable.call(int) to - trigger the rpc. The CancellableRegionServerCallable.call(int) eventually invokes your + Implement #rpcCall(RpcController) and then call RegionServerCallable.call(int) to + trigger the rpc. The RegionServerCallable.call(int) eventually invokes your #rpcCall(RpcController) meanwhile saving you having to write a bunch of - boilerplate. The CancellableRegionServerCallable.call(int) implementation is from RpcRetryingCaller so rpcs are + boilerplate. The RegionServerCallable.call(int) implementation is from RpcRetryingCaller so rpcs are retried on fail. 
TODO: this class is actually tied to one region, because most of the paths make use of @@ -221,25 +216,18 @@ extends - - - -Methods inherited from classorg.apache.hadoop.hbase.client.CancellableRegionServerCallable -call, cancel, doBulkLoadHFile, doCleanupBulkLoad, doMulti, doPrepareBulkLoad, doScan, isCancelled, prepare, setStubByServiceName - - Methods inherited from classorg.apache.hadoop.hbase.client.ClientServiceCallable -doGet, doMutate +doGet, doMutate, setStubByServiceName Methods inherited from classorg.apache.hadoop.hbase.client.RegionServerCallable -getConnection, getExceptionMessageAdditionalDetail, getHRegionInfo, getLocation, getRow, getRpcController, getRpcControllerCellScanner, getStub, getTableName, rpcCall, setLocation, setRpcControllerCellScanner, setStub, sleep, throwable +call, getConnection, getExceptionMessageAdditionalDetail, getHRegionInfo, getLocation, getRow, getRpcController, getRpcControllerCellScanner, getStub, getTableName, prepare, rpcCall, setLocation, setRpcControllerCellScanner, setStub, sleep, throwable http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/Query.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/Query.html b/devapidocs/org/apache/hadoop/hbase/client/Query.html index e5c81db..1c40f21 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/Query.html +++ b/devapidocs/org/apache/hadoop/hbase/client/Query.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10}; +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10}; var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -166,6 +166,10 @@ extends ISOLATION_LEVEL 
+protected http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true; title="class or interface in java.lang">Boolean +loadColumnFamiliesOnDemand + + protected int targetReplicaId @@ -209,53 +213,65 @@ extends Method and Description +boolean +doLoadColumnFamiliesOnDemand() +Get the logical value indicating whether on-demand CF loading should be allowed. + + + byte[] getACL() - + Authorizations getAuthorizations() - + http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Mapbyte[],TimeRange getColumnFamilyTimeRange() - + Consistency getConsistency() Returns the consistency level for this operation - + Filter getFilter() - + IsolationLevel getIsolationLevel() - + +http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true; title="class or interface
[16/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html new file mode 100644 index 000..eddf9e3 --- /dev/null +++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html @@ -0,0 +1,469 @@ +http://www.w3.org/TR/html4/loose.dtd;> + + + + + +AsyncConnectionConfiguration (Apache HBase 2.0.0-SNAPSHOT API) + + + + + +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10}; +var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; +var altColor = "altColor"; +var rowColor = "rowColor"; +var tableTab = "tableTab"; +var activeTableTab = "activeTableTab"; + + +JavaScript is disabled on your browser. + + + + + +Skip navigation links + + + + +Overview +Package +Class +Use +Tree +Deprecated +Index +Help + + + + +PrevClass +NextClass + + +Frames +NoFrames + + +AllClasses + + + + + + + +Summary: +Nested| +Field| +Constr| +Method + + +Detail: +Field| +Constr| +Method + + + + + + + + +org.apache.hadoop.hbase.client +Class AsyncConnectionConfiguration + + + +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">java.lang.Object + + +org.apache.hadoop.hbase.client.AsyncConnectionConfiguration + + + + + + + + +@InterfaceAudience.Private +class AsyncConnectionConfiguration +extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object +Timeout configs. 
+ + + + + + + + + + + +Field Summary + +Fields + +Modifier and Type +Field and Description + + +private int +maxRetries + + +private long +metaOperationTimeoutNs + + +private long +operationTimeoutNs + + +private long +pauseNs + + +private long +readRpcTimeoutNs + + +private int +startLogErrorsCnt +How many retries are allowed before we start to log + + + +private long +writeRpcTimeoutNs + + + + + + + + + +Constructor Summary + +Constructors + +Constructor and Description + + +AsyncConnectionConfiguration(org.apache.hadoop.conf.Configurationconf) + + + + + + + + + +Method Summary + +All MethodsInstance MethodsConcrete Methods + +Modifier and Type +Method and Description + + +(package private) int +getMaxRetries() + + +(package private) long +getMetaOperationTimeoutNs() + + +(package private) long +getOperationTimeoutNs() + + +(package private) long +getPauseNs() + + +(package private) long +getReadRpcTimeoutNs() + + +(package private) int +getStartLogErrorsCnt() + + +(package private) long +getWriteRpcTimeoutNs() + + + + + + +Methods inherited from classjava.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--; title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-; title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--; title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--; title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--; title="class or interface in java.lang">hashCode, 
http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--; title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang /Object.html?is-external=true#notifyAll--" title="class or interface in java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--; title="class or interface in java.lang">toString, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--; title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-; title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-; title="class or
[27/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/index-all.html -- diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html index 5b130a5..c8e7e05 100644 --- a/devapidocs/index-all.html +++ b/devapidocs/index-all.html @@ -94,18 +94,12 @@ abort(String, Throwable) - Method in class org.apache.hadoop.hbase.master.HMaster -abort(MasterProcedureEnv) - Method in class org.apache.hadoop.hbase.master.procedure.DispatchMergingRegionsProcedure - abort(MasterProcedureEnv) - Method in class org.apache.hadoop.hbase.master.procedure.RestoreSnapshotProcedure abort(MasterProcedureEnv) - Method in class org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure abort(MasterProcedureEnv) - Method in class org.apache.hadoop.hbase.master.procedure.TruncateTableProcedure -abort - Variable in class org.apache.hadoop.hbase.mob.mapreduce.SweepJob.DummyMobAbortable - -abort(String, Throwable) - Method in class org.apache.hadoop.hbase.mob.mapreduce.SweepJob.DummyMobAbortable - abort(String) - Method in interface org.apache.hadoop.hbase.monitoring.MonitoredTask abort(String) - Method in class org.apache.hadoop.hbase.monitoring.MonitoredTaskImpl @@ -380,6 +374,10 @@ AbstractPositionedByteRange() - Constructor for class org.apache.hadoop.hbase.util.AbstractPositionedByteRange +AbstractProcedureScheduler - Class in org.apache.hadoop.hbase.procedure2 + +AbstractProcedureScheduler() - Constructor for class org.apache.hadoop.hbase.procedure2.AbstractProcedureScheduler + AbstractProtobufLogWriter - Class in org.apache.hadoop.hbase.regionserver.wal Base class for Protobuf log writer. @@ -438,8 +436,6 @@ accept(Class?) - Method in class org.apache.hadoop.hbase.mapreduce.ResultSerialization -accept(Path) - Method in class org.apache.hadoop.hbase.mob.mapreduce.SweepReducer.PathPrefixFilter - accept(Path, Boolean) - Method in class org.apache.hadoop.hbase.util.AbstractFileStatusFilter Filters out a path. 
@@ -718,6 +714,8 @@ action - Variable in class org.apache.hadoop.hbase.client.Action +action(AsyncSingleRequestRpcRetryingCaller.CallableT) - Method in class org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder + action - Variable in class org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallEvent Action(BaseLoadBalancer.Cluster.Action.Type) - Constructor for class org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action @@ -918,8 +916,6 @@ add(ProcedureProtos.Procedure) - Method in class org.apache.hadoop.hbase.procedure2.store.wal.ProcedureWALFormatReader.WalProcedureMap -add(E) - Method in class org.apache.hadoop.hbase.procedure2.util.TimeoutBlockingQueue - add(IterableCell) - Method in class org.apache.hadoop.hbase.regionserver.AbstractMemStore add(Cell) - Method in class org.apache.hadoop.hbase.regionserver.AbstractMemStore @@ -931,9 +927,13 @@ add(Cell) - Method in class org.apache.hadoop.hbase.regionserver.CellSet add(Cell) - Method in class org.apache.hadoop.hbase.regionserver.HStore - + +Adds a value to the memstore + add(IterableCell) - Method in class org.apache.hadoop.hbase.regionserver.HStore - + +Adds the specified value to the memstore + add(Cell) - Method in interface org.apache.hadoop.hbase.regionserver.MemStore Write an update @@ -954,14 +954,6 @@ Add the specified KeyValue to the list of deletes to check against for this row operation. 
-add(Cell) - Method in interface org.apache.hadoop.hbase.regionserver.Store - -Adds a value to the memstore - -add(IterableCell) - Method in interface org.apache.hadoop.hbase.regionserver.Store - -Adds the specified value to the memstore - add(Cell) - Method in class org.apache.hadoop.hbase.regionserver.wal.WALEdit add(String) - Method in class org.apache.hadoop.hbase.rest.client.Cluster @@ -1157,16 +1149,14 @@ Adds all the attributes into the Operation object -addBack(Procedure) - Method in class org.apache.hadoop.hbase.master.procedure.MasterProcedureScheduler - addBack(Procedure) - Method in class org.apache.hadoop.hbase.master.procedure.MasterProcedureScheduler.QueueImpl -addBack(Procedure) - Method in interface org.apache.hadoop.hbase.procedure2.ProcedureRunnableSet +addBack(Procedure) - Method in class org.apache.hadoop.hbase.procedure2.AbstractProcedureScheduler + +addBack(Procedure) - Method in interface org.apache.hadoop.hbase.procedure2.ProcedureScheduler Inserts the specified element at the end of this queue. -addBack(Procedure) - Method in class org.apache.hadoop.hbase.procedure2.ProcedureSimpleRunQueue - addBlock(ClientProtocol, String, String, ExtendedBlock, DatanodeInfo[], long, String[]) - Method in interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.BlockAdder addBloomFilter(BloomFilterWriter, BlockType) - Method in class
[31/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/bulk-loads.html -- diff --git a/bulk-loads.html b/bulk-loads.html index a41fa28..0469490 100644 --- a/bulk-loads.html +++ b/bulk-loads.html @@ -7,7 +7,7 @@ - + Apache HBase Bulk Loads in Apache HBase (TM) @@ -305,7 +305,7 @@ under the License. --> http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-10-09 + Last Published: 2016-10-17
[17/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html index 6351eea..305ceba 100644 --- a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html +++ b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html @@ -185,62 +185,58 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. -org.apache.hadoop.hbase.mob.mapreduce - - - org.apache.hadoop.hbase.quotas - + org.apache.hadoop.hbase.regionserver - + org.apache.hadoop.hbase.regionserver.wal - + org.apache.hadoop.hbase.replication Multi Cluster Replication - + org.apache.hadoop.hbase.rest HBase REST - + org.apache.hadoop.hbase.rest.client - + org.apache.hadoop.hbase.rsgroup - + org.apache.hadoop.hbase.security - + org.apache.hadoop.hbase.security.access - + org.apache.hadoop.hbase.security.token - + org.apache.hadoop.hbase.security.visibility - + org.apache.hadoop.hbase.snapshot - + org.apache.hadoop.hbase.types @@ -248,11 +244,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. extensible data type API. - + org.apache.hadoop.hbase.util - + org.apache.hadoop.hbase.zookeeper @@ -629,34 +625,52 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. interface -Attributes +AsyncConnection +The asynchronous version of Connection. + + + +interface +AsyncTable +The asynchronous version of Table. + + + +interface +AsyncTableRegionLocator +The asynchronous version of RegionLocator. + interface +Attributes + + +interface BufferedMutator Used to communicate with a single HBase table similar to Table but meant for batched, asynchronous puts. 
- + static interface BufferedMutator.ExceptionListener Listens for asynchronous exceptions on a BufferedMutator. - + class BufferedMutatorParams Parameters for instantiating a BufferedMutator. - + class CompactionState POJO representing the compaction state - + class CompactType Currently, there are only two compact types: @@ -664,148 +678,148 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. MOB means do mob files compaction. - + interface Connection A cluster connection encapsulating lower level individual connections to actual servers and a connection to zookeeper. - + class ConnectionFactory A non-instantiable class that manages creation of Connections. - + class Consistency Consistency defines the expected consistency level for an operation. - + class Delete Used to perform Delete operations on a single row. - + class DoNotRetryRegionException Similar to RegionException, but disables retries. - + class Durability Enum describing the durability guarantees for tables and Mutations Note that the items must be sorted in order of increasing durability - + class Get Used to perform Get operations on a single row. - + class HTableMultiplexer HTableMultiplexer provides a thread-safe non blocking PUT API across all the tables. - + static class HTableMultiplexer.HTableMultiplexerStatus HTableMultiplexerStatus keeps track of the current status of the HTableMultiplexer. - + class Increment Used to perform Increment operations on a single row. - + class IsolationLevel Specify Isolation levels in Scan operations. - + class MasterSwitchType Represents the master switch type - + class Mutation - + class NoServerForRegionException Thrown when no region server can be found for a region - + class Operation Superclass for any type that maps to a potentially application-level query. - + class OperationWithAttributes - + class Put Used to perform Put operations for a single row. 
- + class Query - + class RegionLoadStats POJO representing region server load - + interface RegionLocator Used to view region location information for a single HBase table. - + class RegionOfflineException Thrown when a table can not be located - + class Result Single row result of a Get or Scan query. - + interface ResultScanner Interface for client-side scanning. - + class RetriesExhaustedException Exception thrown by HTable methods when an attempt to do something (like commit changes) fails after a bunch of retries. - + class RetriesExhaustedWithDetailsException This subclass of RetriesExhaustedException @@ -813,25 +827,25 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. exceptions on what
[03/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/HTable.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/HTable.html b/devapidocs/org/apache/hadoop/hbase/client/HTable.html index 6e2b211..85fd23a 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/HTable.html +++ b/devapidocs/org/apache/hadoop/hbase/client/HTable.html @@ -307,7 +307,7 @@ implements void batch(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">List? extends Rowactions, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object[]results, - inttimeout) + intrpcTimeout) Rvoid @@ -1308,7 +1308,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c batch publicvoidbatch(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">List? extends Rowactions, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object[]results, - inttimeout) + intrpcTimeout) throws http://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true; title="class or interface in java.lang">InterruptedException, http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException @@ -1324,7 +1324,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c batchCallback -publicRvoidbatchCallback(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">List? extends Rowactions, +publicRvoidbatchCallback(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">List? 
extends Rowactions, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object[]results, Batch.CallbackRcallback) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException, @@ -1345,7 +1345,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c doBatchWithCallback -public staticRvoiddoBatchWithCallback(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">List? extends Rowactions, +public staticRvoiddoBatchWithCallback(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">List? extends Rowactions, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object[]results, Batch.CallbackRcallback, ClusterConnectionconnection, @@ -1366,7 +1366,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c delete -publicvoiddelete(Deletedelete) +publicvoiddelete(Deletedelete) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Deletes the specified cells/row. @@ -1385,7 +1385,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c delete -publicvoiddelete(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListDeletedeletes) +publicvoiddelete(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListDeletedeletes) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Deletes the specified cells/rows in bulk. 
@@ -1409,7 +1409,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c put -publicvoidput(Putput) +publicvoidput(Putput) throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Puts some data in the table. @@ -1428,7 +1428,7 @@ public statichttp://docs.oracle.com/javase/8/docs/api/java/util/c put -publicvoidput(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListPutputs) +publicvoidput(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in
[50/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/acid-semantics.html -- diff --git a/acid-semantics.html b/acid-semantics.html index e1a3825..dc87cb1 100644 --- a/acid-semantics.html +++ b/acid-semantics.html @@ -7,7 +7,7 @@ - + Apache HBase Apache HBase (TM) ACID Properties @@ -600,7 +600,7 @@ under the License. --> http://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2016-10-09 + Last Published: 2016-10-17
[38/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html index e22025b..87668ac 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/HTableMultiplexer.HTableMultiplexerStatus.html @@ -450,222 +450,226 @@ 442private final int maxRetryInQueue; 443private final AtomicInteger retryInQueue = new AtomicInteger(0); 444private final int writeRpcTimeout; // needed to pass in through AsyncProcess constructor -445 -446public FlushWorker(Configuration conf, ClusterConnection conn, HRegionLocation addr, -447HTableMultiplexer htableMultiplexer, int perRegionServerBufferQueueSize, -448ExecutorService pool, ScheduledExecutorService executor) { -449 this.addr = addr; -450 this.multiplexer = htableMultiplexer; -451 this.queue = new LinkedBlockingQueue(perRegionServerBufferQueueSize); -452 RpcRetryingCallerFactory rpcCallerFactory = RpcRetryingCallerFactory.instantiate(conf); -453 RpcControllerFactory rpcControllerFactory = RpcControllerFactory.instantiate(conf); -454 this.writeRpcTimeout = conf.getInt(HConstants.HBASE_RPC_WRITE_TIMEOUT_KEY, -455 conf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY, -456 HConstants.DEFAULT_HBASE_RPC_TIMEOUT)); -457 this.ap = new AsyncProcess(conn, conf, pool, rpcCallerFactory, false, rpcControllerFactory, writeRpcTimeout); -458 this.executor = executor; -459 this.maxRetryInQueue = conf.getInt(TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE, 1); -460} -461 -462protected LinkedBlockingQueuePutStatus getQueue() { -463 return this.queue; +445private final int operationTimeout; +446 +447public FlushWorker(Configuration conf, ClusterConnection conn, 
HRegionLocation addr, +448HTableMultiplexer htableMultiplexer, int perRegionServerBufferQueueSize, +449ExecutorService pool, ScheduledExecutorService executor) { +450 this.addr = addr; +451 this.multiplexer = htableMultiplexer; +452 this.queue = new LinkedBlockingQueue(perRegionServerBufferQueueSize); +453 RpcRetryingCallerFactory rpcCallerFactory = RpcRetryingCallerFactory.instantiate(conf); +454 RpcControllerFactory rpcControllerFactory = RpcControllerFactory.instantiate(conf); +455 this.writeRpcTimeout = conf.getInt(HConstants.HBASE_RPC_WRITE_TIMEOUT_KEY, +456 conf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY, +457 HConstants.DEFAULT_HBASE_RPC_TIMEOUT)); +458 this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, +459 HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT); +460 this.ap = new AsyncProcess(conn, conf, pool, rpcCallerFactory, false, rpcControllerFactory, +461 writeRpcTimeout, operationTimeout); +462 this.executor = executor; +463 this.maxRetryInQueue = conf.getInt(TABLE_MULTIPLEXER_MAX_RETRIES_IN_QUEUE, 1); 464} 465 -466public long getTotalFailedCount() { -467 return totalFailedPutCount.get(); +466protected LinkedBlockingQueuePutStatus getQueue() { +467 return this.queue; 468} 469 -470public long getTotalBufferedCount() { -471 return queue.size() + currentProcessingCount.get(); +470public long getTotalFailedCount() { +471 return totalFailedPutCount.get(); 472} 473 -474public AtomicAverageCounter getAverageLatencyCounter() { -475 return this.averageLatency; +474public long getTotalBufferedCount() { +475 return queue.size() + currentProcessingCount.get(); 476} 477 -478public long getMaxLatency() { -479 return this.maxLatency.getAndSet(0); +478public AtomicAverageCounter getAverageLatencyCounter() { +479 return this.averageLatency; 480} 481 -482boolean resubmitFailedPut(PutStatus ps, HRegionLocation oldLoc) throws IOException { -483 // Decrease the retry count -484 final int retryCount = ps.maxAttempCount - 1; +482public long 
getMaxLatency() { +483 return this.maxLatency.getAndSet(0); +484} 485 -486 if (retryCount = 0) { -487// Update the failed counter and no retry any more. -488return false; -489 } -490 -491 int cnt = getRetryInQueue().incrementAndGet(); -492 if (cnt getMaxRetryInQueue()) { -493// Too many Puts in queue for resubmit, give up this -494 getRetryInQueue().decrementAndGet(); -495return false; -496 } -497 -498 final Put failedPut = ps.put; -499 // The currentPut is
[42/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html -- diff --git a/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html b/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html index f9579b2..a220262 100644 --- a/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html +++ b/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html @@ -207,7 +207,7 @@ implements org.apache.hadoop.util.Tool -void +http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String doBulkLoad(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listorg.apache.hadoop.fs.Pathmap, Adminadmin, Tabletable, @@ -241,7 +241,7 @@ implements org.apache.hadoop.util.Tool -protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listorg.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem +protected Pairhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listorg.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String groupOrSplit(com.google.common.collect.Multimaphttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffer,org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItemregionGroups, 
org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItemitem, Tabletable, @@ -315,7 +315,7 @@ implements org.apache.hadoop.util.Tool run(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String[]args) -int +http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String run(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringdirPath, http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listorg.apache.hadoop.fs.Pathmap, TableNametableName) @@ -507,14 +507,14 @@ implements org.apache.hadoop.util.Tool doBulkLoad -publicvoiddoBulkLoad(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listorg.apache.hadoop.fs.Pathmap, - Adminadmin, - Tabletable, - RegionLocatorregionLocator, - booleansilence, - booleancopyFile) -throws TableNotFoundException, - http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException +publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringdoBulkLoad(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in 
java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listorg.apache.hadoop.fs.Pathmap, + Adminadmin, + Tabletable, + RegionLocatorregionLocator, + booleansilence, + booleancopyFile) +throws TableNotFoundException, + http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Perform a bulk load of the given directory into the given
[04/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/Get.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/Get.html b/devapidocs/org/apache/hadoop/hbase/client/Get.html index 0ede724..c75f7db 100644 --- a/devapidocs/org/apache/hadoop/hbase/client/Get.html +++ b/devapidocs/org/apache/hadoop/hbase/client/Get.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":42,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":42,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10}; +var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":42,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":42,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10}; var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -218,7 +218,7 @@ implements Fields inherited from classorg.apache.hadoop.hbase.client.Query -colFamTimeRangeMap, consistency, filter, targetReplicaId +colFamTimeRangeMap, consistency, filter, loadColumnFamiliesOnDemand, targetReplicaId @@ -445,35 +445,42 @@ implements Get +setLoadColumnFamiliesOnDemand(booleanvalue) +Set the value indicating whether loading CFs on demand should be allowed (cluster + default is false). + + + +Get setMaxResultsPerColumnFamily(intlimit) Set the maximum number of values to return per row per Column Family - + Get setMaxVersions() Get all available versions. 
- + Get setMaxVersions(intmaxVersions) Get up to the specified number of versions of each column. - + Get setReplicaId(intId) Specify region replica id where Query will fetch data from. - + Get setRowOffsetPerColumnFamily(intoffset) Set offset for the row per Column Family. - + Get setTimeRange(longminStamp, longmaxStamp) @@ -481,13 +488,13 @@ implements - + Get setTimeStamp(longtimestamp) Get versions of columns with the specified timestamp. - + http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true; title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object toMap(intmaxCols) Compile the details beyond the scope of getFingerprint (row, columns, @@ -500,7 +507,7 @@ implements Methods inherited from classorg.apache.hadoop.hbase.client.Query -getACL, getAuthorizations, getColumnFamilyTimeRange, getConsistency, getFilter, getIsolationLevel, getReplicaId +doLoadColumnFamiliesOnDemand, getACL, getAuthorizations, getColumnFamilyTimeRange, getConsistency, getFilter, getIsolationLevel, getLoadColumnFamiliesOnDemandValue, get ReplicaId @@ -680,7 +687,7 @@ implements isCheckExistenceOnly -publicbooleanisCheckExistenceOnly() +publicbooleanisCheckExistenceOnly() @@ -689,7 +696,7 @@ implements setCheckExistenceOnly -publicGetsetCheckExistenceOnly(booleancheckExistenceOnly) +publicGetsetCheckExistenceOnly(booleancheckExistenceOnly) @@ -699,7 +706,7 @@ implements isClosestRowBefore http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -publicbooleanisClosestRowBefore() +publicbooleanisClosestRowBefore() Deprecated.since 2.0.0 and will be removed in 3.0.0 This will always return the default value which is false as client cannot set the value 
to this property any more. @@ -712,7 +719,7 @@ publicboolean setClosestRowBefore http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true; title="class or interface in java.lang">@Deprecated -publicGetsetClosestRowBefore(booleanclosestRowBefore) +publicGetsetClosestRowBefore(booleanclosestRowBefore) Deprecated.since 2.0.0 and will be removed in 3.0.0 This is not used any more and does nothing. Use reverse scan instead. @@ -723,7 +730,7 @@ public addFamily -publicGetaddFamily(byte[]family) +publicGetaddFamily(byte[]family) Get all columns from the specified family. Overrides previous calls to addColumn for this family. @@ -741,7 +748,7 @@ public addColumn -publicGetaddColumn(byte[]family, +publicGetaddColumn(byte[]family, byte[]qualifier) Get the column from the specific family
[07/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/devapidocs/org/apache/hadoop/hbase/client/ClusterRegistryFactory.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/client/ClusterRegistryFactory.html b/devapidocs/org/apache/hadoop/hbase/client/ClusterRegistryFactory.html new file mode 100644 index 000..bded8a8 --- /dev/null +++ b/devapidocs/org/apache/hadoop/hbase/client/ClusterRegistryFactory.html @@ -0,0 +1,320 @@ +http://www.w3.org/TR/html4/loose.dtd;> + + + + + +ClusterRegistryFactory (Apache HBase 2.0.0-SNAPSHOT API) + + + + + +var methods = {"i0":9}; +var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]}; +var altColor = "altColor"; +var rowColor = "rowColor"; +var tableTab = "tableTab"; +var activeTableTab = "activeTableTab"; + + +JavaScript is disabled on your browser. + + + + + +Skip navigation links + + + + +Overview +Package +Class +Use +Tree +Deprecated +Index +Help + + + + +PrevClass +NextClass + + +Frames +NoFrames + + +AllClasses + + + + + + + +Summary: +Nested| +Field| +Constr| +Method + + +Detail: +Field| +Constr| +Method + + + + + + + + +org.apache.hadoop.hbase.client +Class ClusterRegistryFactory + + + +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">java.lang.Object + + +org.apache.hadoop.hbase.client.ClusterRegistryFactory + + + + + + + + +@InterfaceAudience.Private +final class ClusterRegistryFactory +extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object +Get instance of configured Registry. 
+ + + + + + + + + + + +Field Summary + +Fields + +Modifier and Type +Field and Description + + +(package private) static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String +REGISTRY_IMPL_CONF_KEY + + + + + + + + + +Constructor Summary + +Constructors + +Modifier +Constructor and Description + + +private +ClusterRegistryFactory() + + + + + + + + + +Method Summary + +All MethodsStatic MethodsConcrete Methods + +Modifier and Type +Method and Description + + +(package private) static ClusterRegistry +getRegistry(org.apache.hadoop.conf.Configurationconf) + + + + + + +Methods inherited from classjava.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object +http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--; title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-; title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--; title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--; title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--; title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--; title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang /Object.html?is-external=true#notifyAll--" title="class or interface in java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--; title="class or interface in java.lang">toString, 
http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--; title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-; title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-; title="class or interface in java.lang">wait + + + + + + + + + + + + + + +Field Detail + + + + + +REGISTRY_IMPL_CONF_KEY +static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String REGISTRY_IMPL_CONF_KEY + +See Also: +Constant Field Values + + + + + + + + + + +Constructor Detail + + + + + +ClusterRegistryFactory +privateClusterRegistryFactory() + + + + + + + + + +Method Detail + + + + + +getRegistry
[39/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html b/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html index cf4fb8f..bec00e7 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Get.html @@ -118,418 +118,423 @@ 110this.storeOffset = get.getRowOffsetPerColumnFamily(); 111this.tr = get.getTimeRange(); 112this.checkExistenceOnly = get.isCheckExistenceOnly(); -113Mapbyte[], NavigableSetbyte[] fams = get.getFamilyMap(); -114for (Map.Entrybyte[],NavigableSetbyte[] entry : fams.entrySet()) { -115 byte [] fam = entry.getKey(); -116 NavigableSetbyte[] cols = entry.getValue(); -117 if (cols != null cols.size() 0) { -118for (byte[] col : cols) { -119 addColumn(fam, col); -120} -121 } else { -122addFamily(fam); -123 } -124} -125for (Map.EntryString, byte[] attr : get.getAttributesMap().entrySet()) { -126 setAttribute(attr.getKey(), attr.getValue()); -127} -128for (Map.Entrybyte[], TimeRange entry : get.getColumnFamilyTimeRange().entrySet()) { -129 TimeRange tr = entry.getValue(); -130 setColumnFamilyTimeRange(entry.getKey(), tr.getMin(), tr.getMax()); -131} -132 } -133 -134 public boolean isCheckExistenceOnly() { -135return checkExistenceOnly; -136 } -137 -138 public Get setCheckExistenceOnly(boolean checkExistenceOnly) { -139this.checkExistenceOnly = checkExistenceOnly; -140return this; -141 } -142 -143 /** -144 * This will always return the default value which is false as client cannot set the value to this -145 * property any more. -146 * @deprecated since 2.0.0 and will be removed in 3.0.0 -147 */ -148 @Deprecated -149 public boolean isClosestRowBefore() { -150return closestRowBefore; -151 } -152 -153 /** -154 * This is not used any more and does nothing. Use reverse scan instead. 
-155 * @deprecated since 2.0.0 and will be removed in 3.0.0 -156 */ -157 @Deprecated -158 public Get setClosestRowBefore(boolean closestRowBefore) { -159// do Nothing -160return this; -161 } -162 -163 /** -164 * Get all columns from the specified family. -165 * p -166 * Overrides previous calls to addColumn for this family. -167 * @param family family name -168 * @return the Get object -169 */ -170 public Get addFamily(byte [] family) { -171familyMap.remove(family); -172familyMap.put(family, null); -173return this; -174 } -175 -176 /** -177 * Get the column from the specific family with the specified qualifier. -178 * p -179 * Overrides previous calls to addFamily for this family. -180 * @param family family name -181 * @param qualifier column qualifier -182 * @return the Get objec -183 */ -184 public Get addColumn(byte [] family, byte [] qualifier) { -185NavigableSetbyte [] set = familyMap.get(family); -186if(set == null) { -187 set = new TreeSetbyte [](Bytes.BYTES_COMPARATOR); -188} -189if (qualifier == null) { -190 qualifier = HConstants.EMPTY_BYTE_ARRAY; -191} -192set.add(qualifier); -193familyMap.put(family, set); -194return this; -195 } -196 -197 /** -198 * Get versions of columns only within the specified timestamp range, -199 * [minStamp, maxStamp). -200 * @param minStamp minimum timestamp value, inclusive -201 * @param maxStamp maximum timestamp value, exclusive -202 * @throws IOException -203 * @return this for invocation chaining -204 */ -205 public Get setTimeRange(long minStamp, long maxStamp) throws IOException { -206tr = new TimeRange(minStamp, maxStamp); -207return this; -208 } -209 -210 /** -211 * Get versions of columns with the specified timestamp. 
-212 * @param timestamp version timestamp -213 * @return this for invocation chaining -214 */ -215 public Get setTimeStamp(long timestamp) -216 throws IOException { -217try { -218 tr = new TimeRange(timestamp, timestamp+1); -219} catch(Exception e) { -220 // This should never happen, unless integer overflow or something extremely wrong... -221 LOG.error("TimeRange failed, likely caused by integer overflow. ", e); -222 throw e; -223} -224return this; -225 } -226 -227 @Override public Get setColumnFamilyTimeRange(byte[] cf, long minStamp, long maxStamp) { -228return (Get) super.setColumnFamilyTimeRange(cf, minStamp, maxStamp); -229 } -230 -231 /** -232 * Get all available versions. -233 * @return this for invocation chaining -234 */ -235 public Get setMaxVersions() { -236this.maxVersions = Integer.MAX_VALUE; -237return this; -238 } -239 -240 /** -241 * Get up to the specified number of versions of
[52/52] hbase-site git commit: Empty commit
Empty commit Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/9d13f2d9 Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/9d13f2d9 Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/9d13f2d9 Branch: refs/heads/asf-site Commit: 9d13f2d98f82b59344783be0545b30e1bcc767d7 Parents: f07ee53 Author: Dima SpivakAuthored: Mon Oct 17 14:26:32 2016 -0700 Committer: Dima Spivak Committed: Mon Oct 17 14:26:32 2016 -0700 -- --
[35/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.html b/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.html index 8b52a04..61a5457 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/filter/FilterList.html @@ -60,7 +60,7 @@ 052 */ 053@InterfaceAudience.Public 054@InterfaceStability.Stable -055final public class FilterList extends Filter { +055final public class FilterList extends FilterBase { 056 /** set operator */ 057 @InterfaceAudience.Public 058 @InterfaceStability.Stable @@ -73,7 +73,7 @@ 065 066 private static final int MAX_LOG_FILTERS = 5; 067 private Operator operator = Operator.MUST_PASS_ALL; -068 private ListFilter filters = new ArrayListFilter(); +068 private final ListFilter filters; 069 private Filter seekHintFilter = null; 070 071 /** Reference Cell used by {@link #transformCell(Cell)} for validation purpose. */ @@ -91,423 +91,476 @@ 083 /** 084 * Constructor that takes a set of {@link Filter}s. The default operator 085 * MUST_PASS_ALL is assumed. -086 * +086 * All filters are cloned to internal list. 087 * @param rowFilters list of filters 088 */ 089 public FilterList(final ListFilter rowFilters) { -090if (rowFilters instanceof ArrayList) { -091 this.filters = rowFilters; -092} else { -093 this.filters = new ArrayListFilter(rowFilters); -094} -095 } -096 -097 /** -098 * Constructor that takes a var arg number of {@link Filter}s. The fefault operator -099 * MUST_PASS_ALL is assumed. -100 * @param rowFilters -101 */ -102 public FilterList(final Filter... rowFilters) { -103this.filters = new ArrayListFilter(Arrays.asList(rowFilters)); -104 } -105 -106 /** -107 * Constructor that takes an operator. -108 * -109 * @param operator Operator to process filter set with. 
-110 */ -111 public FilterList(final Operator operator) { -112this.operator = operator; -113 } -114 -115 /** -116 * Constructor that takes a set of {@link Filter}s and an operator. -117 * -118 * @param operator Operator to process filter set with. -119 * @param rowFilters Set of row filters. -120 */ -121 public FilterList(final Operator operator, final ListFilter rowFilters) { -122this.filters = new ArrayListFilter(rowFilters); -123this.operator = operator; -124 } -125 -126 /** -127 * Constructor that takes a var arg number of {@link Filter}s and an operator. -128 * -129 * @param operator Operator to process filter set with. -130 * @param rowFilters Filters to use -131 */ -132 public FilterList(final Operator operator, final Filter... rowFilters) { -133this.filters = new ArrayListFilter(Arrays.asList(rowFilters)); -134this.operator = operator; -135 } -136 -137 /** -138 * Get the operator. -139 * -140 * @return operator -141 */ -142 public Operator getOperator() { -143return operator; -144 } -145 -146 /** -147 * Get the filters. -148 * -149 * @return filters -150 */ -151 public ListFilter getFilters() { -152return filters; -153 } -154 -155 /** -156 * Add a filter. -157 * -158 * @param filter another filter -159 */ -160 public void addFilter(Filter filter) { -161if (this.isReversed() != filter.isReversed()) { -162 throw new IllegalArgumentException( -163 "Filters in the list must have the same reversed flag, this.reversed=" -164 + this.isReversed()); -165} -166this.filters.add(filter); -167 } -168 -169 @Override -170 public void reset() throws IOException { -171int listize = filters.size(); -172for (int i = 0; i listize; i++) { -173 filters.get(i).reset(); -174} -175seekHintFilter = null; -176 } -177 -178 @Override -179 public boolean filterRowKey(byte[] rowKey, int offset, int length) throws IOException { -180boolean flag = (this.operator == Operator.MUST_PASS_ONE) ? 
true : false; -181int listize = filters.size(); -182for (int i = 0; i listize; i++) { -183 Filter filter = filters.get(i); -184 if (this.operator == Operator.MUST_PASS_ALL) { -185if (filter.filterAllRemaining() || -186filter.filterRowKey(rowKey, offset, length)) { -187 flag = true; -188} -189 } else if (this.operator == Operator.MUST_PASS_ONE) { -190if (!filter.filterAllRemaining() -191!filter.filterRowKey(rowKey, offset, length)) { -192 flag = false; -193} -194 } -195} -196return flag; -197 } -198 -199 @Override -200 public boolean filterRowKey(Cell firstRowCell) throws IOException { -201boolean flag = (this.operator == Operator.MUST_PASS_ONE) ? true : false; +090
[41/52] [partial] hbase-site git commit: Published site at 278625312047a2100f4dbb2d2eaa4e2219d00e14.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html -- diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html b/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html index 742a58d..4c2f737 100644 --- a/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html +++ b/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html @@ -183,6 +183,15 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. protected Pairbyte[][],byte[][] TableInputFormatBase.getStartEndKeys() + +protected Pairhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listorg.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String +LoadIncrementalHFiles.groupOrSplit(com.google.common.collect.Multimaphttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffer,org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItemregionGroups, + org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItemitem, +Tabletable, +Pairbyte[][],byte[][]startEndKeys) +Attempt to assign the given load queue item into its target region group. + + @@ -193,7 +202,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
-protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listorg.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem +protected Pairhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listorg.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItem,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String LoadIncrementalHFiles.groupOrSplit(com.google.common.collect.Multimaphttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true; title="class or interface in java.nio">ByteBuffer,org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItemregionGroups, org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.LoadQueueItemitem, Tabletable, http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/overview-frame.html -- diff --git a/apidocs/overview-frame.html b/apidocs/overview-frame.html index cf4f3d4..14765bf 100644 --- a/apidocs/overview-frame.html +++ b/apidocs/overview-frame.html @@ -47,7 +47,6 @@ org.apache.hadoop.hbase.master org.apache.hadoop.hbase.mob org.apache.hadoop.hbase.mob.compactions -org.apache.hadoop.hbase.mob.mapreduce org.apache.hadoop.hbase.namespace org.apache.hadoop.hbase.nio org.apache.hadoop.hbase.quotas http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f07ee53f/apidocs/overview-summary.html -- diff --git a/apidocs/overview-summary.html b/apidocs/overview-summary.html index 65cd8e2..2d1d380 100644 --- a/apidocs/overview-summary.html +++ b/apidocs/overview-summary.html @@ -242,86 +242,82 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. 
-org.apache.hadoop.hbase.mob.mapreduce - - - org.apache.hadoop.hbase.namespace - + org.apache.hadoop.hbase.nio - + org.apache.hadoop.hbase.quotas - + org.apache.hadoop.hbase.regionserver - + org.apache.hadoop.hbase.regionserver.querymatcher - + org.apache.hadoop.hbase.regionserver.throttle - + org.apache.hadoop.hbase.regionserver.wal - + org.apache.hadoop.hbase.replication Multi Cluster Replication - + org.apache.hadoop.hbase.rest HBase REST - + org.apache.hadoop.hbase.rest.client - + org.apache.hadoop.hbase.rsgroup - + org.apache.hadoop.hbase.security - + org.apache.hadoop.hbase.shaded.com.google.protobuf - + org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler - + org.apache.hadoop.hbase.shaded.protobuf - + org.apache.hadoop.hbase.snapshot - + org.apache.hadoop.hbase.spark - + org.apache.hadoop.hbase.spark.example.hbasecontext - + org.apache.hadoop.hbase.types @@ -329,23 +325,23 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods. extensible data type API. - + org.apache.hadoop.hbase.util - + org.apache.hadoop.hbase.util.hbck - + org.apache.hadoop.hbase.wal - + org.apache.hadoop.hbase.zookeeper - + org.apache.hbase.archetypes.exemplars.client This package provides fully-functional exemplar Java code
hbase git commit: HBASE-16283 Batch Append/Increment will always fail if set ReturnResults to false (Allan Yang)
Repository: hbase Updated Branches: refs/heads/master e1a6c9403 -> 0c304a049 HBASE-16283 Batch Append/Increment will always fail if set ReturnResults to false (Allan Yang) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0c304a04 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0c304a04 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0c304a04 Branch: refs/heads/master Commit: 0c304a049bff130b971ef030746129cd6daaba7b Parents: e1a6c94 Author: tedyuAuthored: Mon Oct 17 15:42:06 2016 -0700 Committer: tedyu Committed: Mon Oct 17 15:42:06 2016 -0700 -- .../hadoop/hbase/regionserver/HRegion.java | 2 +- .../hadoop/hbase/client/TestFromClientSide.java | 26 +- .../client/TestIncrementsFromClientSide.java| 29 +++- .../hbase/regionserver/TestAtomicOperation.java | 2 +- .../hbase/regionserver/wal/TestDurability.java | 3 +- 5 files changed, 57 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/0c304a04/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index ca92f06..2cf55b5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -7256,7 +7256,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi dropMemstoreContents(); } // If results is null, then client asked that we not return the calculated results. - return results != null && returnResults? Result.create(results): null; + return results != null && returnResults? Result.create(results): Result.EMPTY_RESULT; } finally { // Call complete always, even on success. 
doDelta is doing a Get READ_UNCOMMITTED when it goes // to get current value under an exclusive lock so no need so no need to wait to return to http://git-wip-us.apache.org/repos/asf/hbase/blob/0c304a04/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java index 6981a48..89841a9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java @@ -4449,6 +4449,30 @@ public class TestFromClientSide { } @Test + public void testBatchAppendWithReturnResultFalse() throws Exception { +LOG.info("Starting testBatchAppendWithReturnResultFalse"); +final TableName TABLENAME = TableName.valueOf("testBatchAppend"); +Table table = TEST_UTIL.createTable(TABLENAME, FAMILY); +Append append1 = new Append(Bytes.toBytes("row1")); +append1.setReturnResults(false); +append1.add(FAMILY, Bytes.toBytes("f1"), Bytes.toBytes("value1")); +Append append2 = new Append(Bytes.toBytes("row1")); +append2.setReturnResults(false); +append2.add(FAMILY, Bytes.toBytes("f1"), Bytes.toBytes("value2")); +List appends = new ArrayList<>(); +appends.add(append1); +appends.add(append2); +Object[] results = new Object[2]; +table.batch(appends, results); +assertTrue(results.length == 2); +for(Object r : results) { + Result result = (Result)r; + assertTrue(result.isEmpty()); +} +table.close(); + } + + @Test public void testAppend() throws Exception { LOG.info("Starting testAppend"); final TableName TABLENAME = TableName.valueOf("testAppend"); @@ -4462,7 +4486,7 @@ public class TestFromClientSide { a.add(FAMILY, QUALIFIERS[0], v1); a.add(FAMILY, QUALIFIERS[1], v2); a.setReturnResults(false); -assertNullResult(t.append(a)); +assertEmptyResult(t.append(a)); a = new Append(ROW); a.add(FAMILY, 
QUALIFIERS[0], v2); http://git-wip-us.apache.org/repos/asf/hbase/blob/0c304a04/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java index 3ddfef4..1c51177 100644 ---
hbase git commit: HBASE-16856 Exception message in SyncRunner.run() should print currentSequence (Allan Yang)
Repository: hbase Updated Branches: refs/heads/master 1e3d8c822 -> 278625312 HBASE-16856 Exception message in SyncRunner.run() should print currentSequence (Allan Yang) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/27862531 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/27862531 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/27862531 Branch: refs/heads/master Commit: 278625312047a2100f4dbb2d2eaa4e2219d00e14 Parents: 1e3d8c8 Author: tedyuAuthored: Mon Oct 17 07:19:49 2016 -0700 Committer: tedyu Committed: Mon Oct 17 07:19:49 2016 -0700 -- .../main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/27862531/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java index f93537d..3e0e829 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java @@ -601,7 +601,7 @@ public class FSHLog extends AbstractFSWAL { currentSequence = this.sequence; long syncFutureSequence = takeSyncFuture.getTxid(); if (syncFutureSequence > currentSequence) { - throw new IllegalStateException("currentSequence=" + syncFutureSequence + throw new IllegalStateException("currentSequence=" + currentSequence + ", syncFutureSequence=" + syncFutureSequence); } // See if we can process any syncfutures BEFORE we go sync.
[4/6] hbase git commit: HBASE-16847 Commented out broken test-compile references. These will be fixed and put back in later.
http://git-wip-us.apache.org/repos/asf/hbase/blob/d6ef946f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 08f0470..c9b4217 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -676,445 +676,445 @@ public class TestHRegion { scanner1.close(); } - @Test - public void testSkipRecoveredEditsReplay() throws Exception { -String method = "testSkipRecoveredEditsReplay"; -TableName tableName = TableName.valueOf(method); -byte[] family = Bytes.toBytes("family"); -this.region = initHRegion(tableName, method, CONF, family); -final WALFactory wals = new WALFactory(CONF, null, method); -try { - Path regiondir = region.getRegionStorage().getRegionDir(); - FileSystem fs = region.getRegionStorage().getFileSystem(); - byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes(); - - Path recoveredEditsDir = WALSplitter.getRegionDirRecoveredEditsDir(regiondir); - - long maxSeqId = 1050; - long minSeqId = 1000; - - for (long i = minSeqId; i <= maxSeqId; i += 10) { -Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i)); -fs.create(recoveredEdits); -WALProvider.Writer writer = wals.createRecoveredEditsWriter(fs, recoveredEdits); - -long time = System.nanoTime(); -WALEdit edit = new WALEdit(); -edit.add(new KeyValue(row, family, Bytes.toBytes(i), time, KeyValue.Type.Put, Bytes -.toBytes(i))); -writer.append(new WAL.Entry(new HLogKey(regionName, tableName, i, time, -HConstants.DEFAULT_CLUSTER_ID), edit)); - -writer.close(); - } - MonitoredTask status = TaskMonitor.get().createStatus(method); - MapmaxSeqIdInStores = new TreeMap (Bytes.BYTES_COMPARATOR); - for (Store store : region.getStores()) { 
-maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(), minSeqId - 1); - } - long seqId = region.replayRecoveredEditsIfAny(regiondir, maxSeqIdInStores, null, status); - assertEquals(maxSeqId, seqId); - region.getMVCC().advanceTo(seqId); - Get get = new Get(row); - Result result = region.get(get); - for (long i = minSeqId; i <= maxSeqId; i += 10) { -List kvs = result.getColumnCells(family, Bytes.toBytes(i)); -assertEquals(1, kvs.size()); -assertArrayEquals(Bytes.toBytes(i), CellUtil.cloneValue(kvs.get(0))); - } -} finally { - HBaseTestingUtility.closeRegionAndWAL(this.region); - this.region = null; - wals.close(); -} - } - - @Test - public void testSkipRecoveredEditsReplaySomeIgnored() throws Exception { -String method = "testSkipRecoveredEditsReplaySomeIgnored"; -TableName tableName = TableName.valueOf(method); -byte[] family = Bytes.toBytes("family"); -this.region = initHRegion(tableName, method, CONF, family); -final WALFactory wals = new WALFactory(CONF, null, method); -try { - Path regiondir = region.getRegionStorage().getRegionDir(); - FileSystem fs = region.getRegionStorage().getFileSystem(); - byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes(); - - Path recoveredEditsDir = WALSplitter.getRegionDirRecoveredEditsDir(regiondir); - - long maxSeqId = 1050; - long minSeqId = 1000; - - for (long i = minSeqId; i <= maxSeqId; i += 10) { -Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i)); -fs.create(recoveredEdits); -WALProvider.Writer writer = wals.createRecoveredEditsWriter(fs, recoveredEdits); - -long time = System.nanoTime(); -WALEdit edit = new WALEdit(); -edit.add(new KeyValue(row, family, Bytes.toBytes(i), time, KeyValue.Type.Put, Bytes -.toBytes(i))); -writer.append(new WAL.Entry(new HLogKey(regionName, tableName, i, time, -HConstants.DEFAULT_CLUSTER_ID), edit)); - -writer.close(); - } - long recoverSeqId = 1030; - MonitoredTask status = TaskMonitor.get().createStatus(method); - Map maxSeqIdInStores = new 
TreeMap (Bytes.BYTES_COMPARATOR); - for (Store store : region.getStores()) { -maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(), recoverSeqId - 1); - } - long seqId = region.replayRecoveredEditsIfAny(regiondir, maxSeqIdInStores, null, status); - assertEquals(maxSeqId, seqId); - region.getMVCC().advanceTo(seqId); - Get get = new Get(row); - Result result = region.get(get); - for
[1/6] hbase git commit: HBASE-16847 Commented out broken test-compile references. These will be fixed and put back in later.
Repository: hbase Updated Branches: refs/heads/hbase-14439 300bdfd2a -> d6ef946f4 http://git-wip-us.apache.org/repos/asf/hbase/blob/d6ef946f/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java index d824d70..40bd961 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java @@ -65,8 +65,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.client.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; -import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest; -import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.util.Bytes; @@ -163,8 +161,8 @@ public final class SnapshotTestingUtils { HBaseProtos.SnapshotDescription snapshotDescriptor, TableName tableName, byte[] family) throws IOException { MasterStorage mfs = testUtil.getHBaseCluster().getMaster().getMasterStorage(); -confirmSnapshotValid(snapshotDescriptor, tableName, family, -mfs.getRootDir(), testUtil.getHBaseAdmin(), mfs.getFileSystem()); +//confirmSnapshotValid(snapshotDescriptor, tableName, family, +//mfs.getRootDir(), testUtil.getHBaseAdmin(), mfs.getFileSystem()); } /** @@ -273,18 +271,18 @@ public final class SnapshotTestingUtils { */ public static void waitForSnapshotToComplete(HMaster master, HBaseProtos.SnapshotDescription snapshot, long sleep) throws ServiceException { -final IsSnapshotDoneRequest 
request = IsSnapshotDoneRequest.newBuilder() -.setSnapshot(snapshot).build(); -IsSnapshotDoneResponse done = IsSnapshotDoneResponse.newBuilder() -.buildPartial(); -while (!done.getDone()) { - done = master.getMasterRpcServices().isSnapshotDone(null, request); - try { -Thread.sleep(sleep); - } catch (InterruptedException e) { -throw new ServiceException(e); - } -} +//final IsSnapshotDoneRequest request = IsSnapshotDoneRequest.newBuilder() +//.setSnapshot(snapshot).build(); +//IsSnapshotDoneResponse done = IsSnapshotDoneResponse.newBuilder() +//.buildPartial(); +//while (!done.getDone()) { +// done = master.getMasterRpcServices().isSnapshotDone(null, request); +// try { +//Thread.sleep(sleep); +// } catch (InterruptedException e) { +//throw new ServiceException(e); +// } +//} } /* @@ -321,30 +319,30 @@ public final class SnapshotTestingUtils { assertNoSnapshots(admin); } - /** - * Expect the snapshot to throw an error when checking if the snapshot is - * complete - * - * @param master master to check - * @param snapshot the {@link SnapshotDescription} request to pass to the master - * @param clazz expected exception from the master - */ - public static void expectSnapshotDoneException(HMaster master, - IsSnapshotDoneRequest snapshot, - Class clazz) { -try { - master.getMasterRpcServices().isSnapshotDone(null, snapshot); - Assert.fail("didn't fail to lookup a snapshot"); -} catch (ServiceException se) { - try { -throw ProtobufUtil.getRemoteException(se); - } catch (HBaseSnapshotException e) { -assertEquals("Threw wrong snapshot exception!", clazz, e.getClass()); - } catch (Throwable t) { -Assert.fail("Threw an unexpected exception:" + t); - } -} - } +// /** +// * Expect the snapshot to throw an error when checking if the snapshot is +// * complete +// * +// * @param master master to check +// * @param snapshot the {@link SnapshotDescription} request to pass to the master +// * @param clazz expected exception from the master +// */ +// public static void 
expectSnapshotDoneException(HMaster master, +// IsSnapshotDoneRequest snapshot, +// Class clazz) { +//try { +// master.getMasterRpcServices().isSnapshotDone(null, snapshot); +// Assert.fail("didn't fail to lookup a snapshot"); +//} catch (ServiceException se) { +// try { +//throw ProtobufUtil.getRemoteException(se); +// } catch (HBaseSnapshotException e) { +//assertEquals("Threw wrong snapshot exception!", clazz, e.getClass()); +// } catch (Throwable t) { +//Assert.fail("Threw an unexpected exception:" + t); +// } +//
[3/6] hbase git commit: HBASE-16847 Commented out broken test-compile references. These will be fixed and put back in later.
http://git-wip-us.apache.org/repos/asf/hbase/blob/d6ef946f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionStorage.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionStorage.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionStorage.java new file mode 100644 index 000..3a6fd47 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionStorage.java @@ -0,0 +1,230 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.regionserver; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.net.URI; +import java.util.Collection; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.fs.RegionStorage; +import org.apache.hadoop.hbase.fs.FSUtilsWithRetries; +import org.apache.hadoop.hbase.testclassification.RegionServerTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.FSUtils; +import org.apache.hadoop.util.Progressable; + +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category({RegionServerTests.class, SmallTests.class}) +public class TestHRegionStorage { + private static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + private static final Log LOG = LogFactory.getLog(TestHRegionStorage.class); + +// @Test +// public void testOnDiskRegionCreation() throws IOException { +//Path rootDir = TEST_UTIL.getDataTestDirOnTestFS("testOnDiskRegionCreation"); +//FileSystem fs = TEST_UTIL.getTestFileSystem(); +//Configuration conf = TEST_UTIL.getConfiguration(); +// +//// Create a Region +//HRegionInfo hri = new HRegionInfo(TableName.valueOf("TestTable")); +//RegionStorage regionFs = RegionStorage.open(conf, fs, rootDir, hri, true); +// +//// Verify if the region is on disk +//Path regionDir = regionFs.getRegionDir(); 
+//assertTrue("The region folder should be created", fs.exists(regionDir)); +// +//// Verify the .regioninfo +//HRegionInfo hriVerify = RegionStorage.open(conf, regionDir, false).getRegionInfo(); +//assertEquals(hri, hriVerify); +// +//// Open the region +//regionFs = RegionStorage.open(conf, fs, rootDir, hri, false); +//assertEquals(regionDir, regionFs.getRegionDir()); +// +//// Delete the region +//RegionStorage.destroy(conf, fs, rootDir, hri); +//assertFalse("The region folder should be removed", fs.exists(regionDir)); +// +//fs.delete(rootDir, true); +// } + + @Test + public void testNonIdempotentOpsWithRetries() throws IOException { +Path rootDir = TEST_UTIL.getDataTestDirOnTestFS("testOnDiskRegionCreation"); +FileSystem fs = TEST_UTIL.getTestFileSystem(); +Configuration conf = TEST_UTIL.getConfiguration(); + +FSUtilsWithRetries regionFs = new FSUtilsWithRetries(conf, new MockFileSystemForCreate()); +boolean result = regionFs.createDir(new Path("/foo/bar")); +assertTrue("Couldn't create the directory", result); + +regionFs = new FSUtilsWithRetries(conf, new MockFileSystem()); +result = regionFs.rename(new Path("/foo/bar"), new Path("/foo/bar2")); +assertTrue("Couldn't rename the directory", result); + +regionFs = new FSUtilsWithRetries(conf, new MockFileSystem()); +result = regionFs.deleteDir(new Path("/foo/bar")); +assertTrue("Couldn't delete the directory", result); +fs.delete(rootDir, true); + } + + static class
[5/6] hbase git commit: HBASE-16847 Commented out broken test-compile references. These will be fixed and put back in later.
http://git-wip-us.apache.org/repos/asf/hbase/blob/d6ef946f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java -- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java index 95e19f2..21c5b59 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; +import junit.framework.TestCase; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; @@ -41,301 +42,301 @@ import com.google.common.collect.Lists; @Category({RegionServerTests.class, SmallTests.class}) public class TestDefaultCompactSelection extends TestCase { - private final static Log LOG = LogFactory.getLog(TestDefaultCompactSelection.class); - private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - - protected Configuration conf; - protected HStore store; - private static final String DIR= - TEST_UTIL.getDataTestDir(TestDefaultCompactSelection.class.getSimpleName()).toString(); - private static Path TEST_FILE; - - protected static final int minFiles = 3; - protected static final int maxFiles = 5; - - protected static final long minSize = 10; - protected static final long maxSize = 2100; - - private WALFactory wals; - private HRegion region; - - @Override - public void setUp() throws Exception { -// setup config values necessary for store -this.conf = TEST_UTIL.getConfiguration(); -this.conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, 0); -this.conf.setInt("hbase.hstore.compaction.min", minFiles); -this.conf.setInt("hbase.hstore.compaction.max", maxFiles); 
-this.conf.setLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, minSize); -this.conf.setLong("hbase.hstore.compaction.max.size", maxSize); -this.conf.setFloat("hbase.hstore.compaction.ratio", 1.0F); -// Test depends on this not being set to pass. Default breaks test. TODO: Revisit. -this.conf.unset("hbase.hstore.compaction.min.size"); - -//Setting up a Store -final String id = TestDefaultCompactSelection.class.getName(); -Path basedir = new Path(DIR); -final Path logdir = new Path(basedir, DefaultWALProvider.getWALDirectoryName(id)); -HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes("family")); -FileSystem fs = FileSystem.get(conf); - -fs.delete(logdir, true); - -HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(Bytes.toBytes("table"))); -htd.addFamily(hcd); -HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); - -final Configuration walConf = new Configuration(conf); -FSUtils.setRootDir(walConf, basedir); -wals = new WALFactory(walConf, null, id); -region = HBaseTestingUtility.createRegionAndWAL(info, basedir, conf, htd); -HBaseTestingUtility.closeRegionAndWAL(region); - -RegionStorage rfs = RegionStorage.open(conf, fs, basedir, info, false); -region = new HRegion(rfs, htd, - wals.getWAL(info.getEncodedNameAsBytes(), info.getTable().getNamespace()), null); - -store = new HStore(region, hcd, conf); - -TEST_FILE = region.getRegionStorage().createTempName(); -fs.createNewFile(TEST_FILE); - } - - @After - public void tearDown() throws IOException { -IOException ex = null; -try { - region.close(); -} catch (IOException e) { - LOG.warn("Caught Exception", e); - ex = e; -} -try { - wals.close(); -} catch (IOException e) { - LOG.warn("Caught Exception", e); - ex = e; -} -if (ex != null) { - throw ex; -} - } - - ArrayList toArrayList(long... numbers) { -ArrayList result = new ArrayList(); -for (long i : numbers) { - result.add(i); -} -return result; - } - - List sfCreate(long... 
sizes) throws IOException { -ArrayList ageInDisk = new ArrayList(); -for (int i = 0; i < sizes.length; i++) { - ageInDisk.add(0L); -} -return sfCreate(toArrayList(sizes), ageInDisk); - } - - List sfCreate(ArrayList sizes, ArrayList ageInDisk) -throws IOException { -return sfCreate(false, sizes, ageInDisk); - } - - List sfCreate(boolean isReference, long... sizes) throws IOException { -ArrayList ageInDisk = new ArrayList(sizes.length); -for (int i = 0; i < sizes.length; i++) { - ageInDisk.add(0L); -} -return sfCreate(isReference, toArrayList(sizes), ageInDisk); - } - - List sfCreate(boolean isReference, ArrayList sizes, ArrayList ageInDisk) - throws
hbase git commit: HBASE-16855 Avoid NPE in MetricsConnection’s construction (ChiaPing Tsai)
Repository: hbase Updated Branches: refs/heads/master 278625312 -> c8e9a295c HBASE-16855 Avoid NPE in MetricsConnectionâs construction (ChiaPing Tsai) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c8e9a295 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c8e9a295 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c8e9a295 Branch: refs/heads/master Commit: c8e9a295c133ef9507a84ab9c70d18563e2c22ad Parents: 2786253 Author: tedyuAuthored: Mon Oct 17 09:34:21 2016 -0700 Committer: tedyu Committed: Mon Oct 17 09:34:21 2016 -0700 -- .../hadoop/hbase/client/MetricsConnection.java | 31 .../hbase/client/TestMetricsConnection.java | 24 +++ 2 files changed, 44 insertions(+), 11 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/c8e9a295/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java index 22a5561..36627bd 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java @@ -298,23 +298,29 @@ public class MetricsConnection implements StatisticTrackable { private final ConcurrentMap cacheDroppingExceptions = new ConcurrentHashMap<>(CAPACITY, LOAD_FACTOR, CONCURRENCY_LEVEL); - public MetricsConnection(final ConnectionImplementation conn) { + MetricsConnection(final ConnectionImplementation conn) { this.scope = conn.toString(); this.registry = new MetricRegistry(); -final ThreadPoolExecutor batchPool = (ThreadPoolExecutor) conn.getCurrentBatchPool(); -final ThreadPoolExecutor metaPool = (ThreadPoolExecutor) conn.getCurrentMetaLookupPool(); -this.registry.register(name(this.getClass(), "executorPoolActiveThreads", scope), 
+this.registry.register(getExecutorPoolName(), new RatioGauge() { @Override protected Ratio getRatio() { +ThreadPoolExecutor batchPool = (ThreadPoolExecutor) conn.getCurrentBatchPool(); +if (batchPool == null) { + return Ratio.of(0, 0); +} return Ratio.of(batchPool.getActiveCount(), batchPool.getMaximumPoolSize()); } }); -this.registry.register(name(this.getClass(), "metaPoolActiveThreads", scope), +this.registry.register(getMetaPoolName(), new RatioGauge() { @Override protected Ratio getRatio() { +ThreadPoolExecutor metaPool = (ThreadPoolExecutor) conn.getCurrentMetaLookupPool(); +if (metaPool == null) { + return Ratio.of(0, 0); +} return Ratio.of(metaPool.getActiveCount(), metaPool.getMaximumPoolSize()); } }); @@ -337,6 +343,21 @@ public class MetricsConnection implements StatisticTrackable { this.reporter.start(); } + @VisibleForTesting + final String getExecutorPoolName() { +return name(getClass(), "executorPoolActiveThreads", scope); + } + + @VisibleForTesting + final String getMetaPoolName() { +return name(getClass(), "metaPoolActiveThreads", scope); + } + + @VisibleForTesting + MetricRegistry getMetricRegistry() { +return registry; + } + public void shutdown() { this.reporter.stop(); } http://git-wip-us.apache.org/repos/asf/hbase/blob/c8e9a295/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java -- diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java index d17dd7f..854ecc5 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hbase.client; +import com.codahale.metrics.RatioGauge; +import com.codahale.metrics.RatioGauge.Ratio; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService; @@ -32,24 +34,28 @@ import org.apache.hadoop.hbase.testclassification.MetricsTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import
hbase git commit: HBASE-16861 Rename Service to Task (Vladimir Rodionov)
Repository: hbase Updated Branches: refs/heads/HBASE-7912 909c4efa8 -> 914d162ca HBASE-16861 Rename Service to Task (Vladimir Rodionov) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/914d162c Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/914d162c Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/914d162c Branch: refs/heads/HBASE-7912 Commit: 914d162ca163c818654eb912fd2c42e6ea58fa72 Parents: 909c4ef Author: tedyuAuthored: Mon Oct 17 11:59:54 2016 -0700 Committer: tedyu Committed: Mon Oct 17 11:59:54 2016 -0700 -- .../hadoop/hbase/backup/BackupCopyService.java | 53 --- .../hadoop/hbase/backup/BackupCopyTask.java | 53 +++ .../backup/BackupRestoreServerFactory.java | 24 +- .../hadoop/hbase/backup/RestoreService.java | 50 --- .../apache/hadoop/hbase/backup/RestoreTask.java | 50 +++ .../backup/impl/FullTableBackupClient.java | 4 +- .../impl/IncrementalTableBackupClient.java | 4 +- .../mapreduce/MapReduceBackupCopyService.java | 349 --- .../mapreduce/MapReduceBackupCopyTask.java | 349 +++ .../mapreduce/MapReduceRestoreService.java | 171 - .../backup/mapreduce/MapReduceRestoreTask.java | 171 + .../hbase/backup/util/RestoreServerUtil.java| 6 +- 12 files changed, 642 insertions(+), 642 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/914d162c/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyService.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyService.java deleted file mode 100644 index 6c70123..000 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyService.java +++ /dev/null @@ -1,53 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hbase.backup; - -import java.io.IOException; - -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.backup.impl.BackupManager; -import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; - -@InterfaceAudience.Private -@InterfaceStability.Evolving -public interface BackupCopyService extends Configurable { - - /** - * Copy backup data - * @param backupContext - context - * @param backupManager - manager - * @param conf - configuration - * @param copyType - copy type - * @param options - array of options (implementation-specific) - * @return result (0 - success) - * @throws IOException - */ - public int copy(BackupInfo backupContext, BackupManager backupManager, Configuration conf, - BackupType copyType, String[] options) throws IOException; - - - /** -* Cancel copy job -* @param jobHandler - copy job handler -* @throws IOException -*/ - public void cancelCopyJob(String jobHandler) throws IOException; -} http://git-wip-us.apache.org/repos/asf/hbase/blob/914d162c/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyTask.java -- diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyTask.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyTask.java new file mode 100644 index 0000000..ba23bd4 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupCopyTask.java @@ -0,0 +1,53 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may
hbase git commit: HBASE-16326 CellModel / RowModel should override 'equals', 'hashCode' and 'toString' (Minwoo Kang)
Repository: hbase Updated Branches: refs/heads/master c6e9dabe6 -> 73e945670 HBASE-16326 CellModel / RowModel should override 'equals', 'hashCode' and 'toString' (Minwoo Kang) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/73e94567 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/73e94567 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/73e94567 Branch: refs/heads/master Commit: 73e945670270de187be337037f75577ccc1a770b Parents: c6e9dab Author: tedyu Authored: Mon Oct 17 12:42:06 2016 -0700 Committer: tedyu Committed: Mon Oct 17 12:42:06 2016 -0700 -- .../hadoop/hbase/rest/model/CellModel.java | 40 .../hadoop/hbase/rest/model/RowModel.java | 37 ++ .../hadoop/hbase/rest/model/TestCellModel.java | 24 .../hadoop/hbase/rest/model/TestRowModel.java | 25 4 files changed, 126 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/73e94567/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java index 919135f..0b98816 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java @@ -28,6 +28,9 @@ import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlValue; +import org.apache.commons.lang.builder.EqualsBuilder; +import org.apache.commons.lang.builder.HashCodeBuilder; +import org.apache.commons.lang.builder.ToStringBuilder; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.CellUtil; @@ -207,4 +210,41 @@ public class CellModel implements ProtobufMessageHandler, Serializable { } return this; } + + @Override + public boolean 
equals(Object obj) { +if (obj == null) { + return false; +} +if (obj == this) { + return true; +} +if (obj.getClass() != getClass()) { + return false; +} +CellModel cellModel = (CellModel) obj; +return new EqualsBuilder(). +append(column, cellModel.column). +append(timestamp, cellModel.timestamp). +append(value, cellModel.value). +isEquals(); + } + + @Override + public int hashCode() { +return new HashCodeBuilder(). +append(column). +append(timestamp). +append(value). +toHashCode(); + } + + @Override + public String toString() { +return new ToStringBuilder(this). +append("column", column). +append("timestamp", timestamp). +append("value", value). +toString(); + } } http://git-wip-us.apache.org/repos/asf/hbase/blob/73e94567/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java -- diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java index 596c754..398d5e1 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java @@ -30,6 +30,9 @@ import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; +import org.apache.commons.lang.builder.EqualsBuilder; +import org.apache.commons.lang.builder.HashCodeBuilder; +import org.apache.commons.lang.builder.ToStringBuilder; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; import org.codehaus.jackson.annotate.JsonProperty; @@ -148,4 +151,38 @@ public class RowModel implements ProtobufMessageHandler, Serializable { throw new UnsupportedOperationException( "no protobuf equivalent to RowModel"); } + + @Override + public boolean equals(Object obj) { +if (obj == null) { + return false; +} +if (obj == this) { + return true; +} +if (obj.getClass() != 
getClass()) { + return false; +} +RowModel rowModel = (RowModel) obj; +return new EqualsBuilder(). +append(key, rowModel.key). +append(cells, rowModel.cells). +isEquals(); + } + + @Override + public int hashCode() { +return new HashCodeBuilder(). +append(key). +append(cells). +