hbase git commit: HBASE-20095 Redesign single instance pool in CleanerChore - addendum simplifies onConfigurationChange
Repository: hbase
Updated Branches:
  refs/heads/branch-2 039bc7357 -> e78a8e08f

HBASE-20095 Redesign single instance pool in CleanerChore - addendum simplifies onConfigurationChange

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e78a8e08
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e78a8e08
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e78a8e08

Branch: refs/heads/branch-2
Commit: e78a8e08f0436cea86fc9a873576b2d5bab4de73
Parents: 039bc73
Author: tedyu
Authored: Thu Apr 5 10:25:09 2018 -0700
Committer: tedyu
Committed: Thu Apr 5 10:25:09 2018 -0700

----------------------------------------------------------------------
 .../hbase/master/cleaner/CleanerChore.java | 25 +++++++++----------------
 1 file changed, 9 insertions(+), 16 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/e78a8e08/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index 312bcce..396fbaf 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -89,37 +89,33 @@ public abstract class CleanerChore extends Schedu
   }
 
   /**
-   * Checks if pool can be updated immediately.
+   * Checks if pool can be updated. If so, mark for update later.
    * @param conf configuration
-   * @return true if pool can be updated immediately, false otherwise
    */
-  synchronized boolean canUpdateImmediately(Configuration conf) {
+  synchronized void markUpdate(Configuration conf) {
     int newSize = calculatePoolSize(conf.get(CHORE_POOL_SIZE, DEFAULT_CHORE_POOL_SIZE));
     if (newSize == size) {
       LOG.trace("Size from configuration is same as previous={}, no need to update.", newSize);
-      return false;
+      return;
     }
     size = newSize;
-    if (pool.getPoolSize() == 0) {
-      // chore has no working thread.
-      return true;
-    }
     // Chore is working, update it later.
     reconfigNotification.set(true);
-    return false;
   }
 
   /**
    * Update pool with new size.
    */
   synchronized void updatePool(long timeout) {
-    while (cleanerLatch != 0) {
+    long stopTime = System.currentTimeMillis() + timeout;
+    while (cleanerLatch != 0 && timeout > 0) {
       try {
         wait(timeout);
+        timeout = stopTime - System.currentTimeMillis();
       } catch (InterruptedException ie) {
-        // It's ok to ignore
+        Thread.currentThread().interrupt();
+        break;
       }
-      break;
     }
     pool.shutdownNow();
     LOG.info("Update chore's pool size from {} to {}", pool.getParallelism(), size);
@@ -243,10 +239,7 @@ public abstract class CleanerChore extends Schedu
   @Override
   public void onConfigurationChange(Configuration conf) {
-    if (POOL.canUpdateImmediately(conf)) {
-      // Can immediately update, no need to wait.
-      POOL.updatePool(0);
-    }
+    POOL.markUpdate(conf);
   }
 
   /**
hbase git commit: HBASE-20095 Redesign single instance pool in CleanerChore - addendum simplifies onConfigurationChange
Repository: hbase
Updated Branches:
  refs/heads/master 65e85c4f8 -> 8f6849ff0

HBASE-20095 Redesign single instance pool in CleanerChore - addendum simplifies onConfigurationChange

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8f6849ff
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8f6849ff
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8f6849ff

Branch: refs/heads/master
Commit: 8f6849ff02fa9e37aab9d7bcab5b8d7b8b37a89d
Parents: 65e85c4
Author: tedyu
Authored: Thu Apr 5 10:24:37 2018 -0700
Committer: tedyu
Committed: Thu Apr 5 10:24:37 2018 -0700

----------------------------------------------------------------------
 .../hbase/master/cleaner/CleanerChore.java | 25 +++++++++----------------
 1 file changed, 9 insertions(+), 16 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/8f6849ff/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index 312bcce..396fbaf 100644
[The patch is identical to the branch-2 commit e78a8e08 above: canUpdateImmediately is replaced by markUpdate, updatePool gains a bounded wait with proper interrupt handling, and onConfigurationChange simply calls POOL.markUpdate(conf).]
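For readers following the addendum, the pattern it lands on is: a configuration change only records the requested pool size, and the pool itself is rebuilt later, once in-flight cleaners have drained or a bounded wait expires. Below is a minimal standalone sketch of that pattern; the class and member names (DirScanPoolSketch, cleanerLatch, reconfigNotification) merely mirror the patch for readability and this is illustrative code, not the actual HBase source.

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Illustrative sketch (not HBase source): defer a thread-pool resize requested by a
 * configuration change until running work has drained or a bounded wait expires.
 */
public class DirScanPoolSketch {
  private ForkJoinPool pool;
  private int size;
  // number of cleaner tasks currently using the pool
  private int cleanerLatch = 0;
  // set when a resize has been requested but not yet applied
  private final AtomicBoolean reconfigNotification = new AtomicBoolean(false);

  public DirScanPoolSketch(int initialSize) {
    this.size = initialSize;
    this.pool = new ForkJoinPool(initialSize);
  }

  /** Called from onConfigurationChange: only record the new size and flag the pending update. */
  synchronized void markUpdate(int newSize) {
    if (newSize == size) {
      return; // nothing to do
    }
    size = newSize;
    reconfigNotification.set(true);
  }

  /** Called by the chore between runs: wait (bounded) for users to finish, then rebuild the pool. */
  synchronized void updatePool(long timeout) {
    if (!reconfigNotification.compareAndSet(true, false)) {
      return; // no pending resize
    }
    long stopTime = System.currentTimeMillis() + timeout;
    while (cleanerLatch != 0 && timeout > 0) {
      try {
        wait(timeout);
        timeout = stopTime - System.currentTimeMillis();
      } catch (InterruptedException ie) {
        Thread.currentThread().interrupt();
        break;
      }
    }
    pool.shutdownNow();
    pool = new ForkJoinPool(size);
  }

  /** Cleaner tasks bracket their use of the pool with latchCountUp()/latchCountDown(). */
  synchronized void latchCountUp() {
    cleanerLatch++;
  }

  synchronized void latchCountDown() {
    cleanerLatch--;
    notifyAll();
  }
}

The design point the addendum makes is that the configuration-change callback never blocks: it only flags the pending resize, and the bounded wait happens on the chore's own schedule.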
hbase git commit: HBASE-17518 HBase Reference Guide has a syntax error
Repository: hbase
Updated Branches:
  refs/heads/master f27819a9a -> 65e85c4f8

HBASE-17518 HBase Reference Guide has a syntax error

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/65e85c4f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/65e85c4f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/65e85c4f

Branch: refs/heads/master
Commit: 65e85c4f8a9c57d0b2a483e1f7eb4329c5bbba3d
Parents: f27819a
Author: Zhang Ningbo
Authored: Thu Apr 5 10:17:13 2018 -0700
Committer: Michael Stack
Committed: Thu Apr 5 10:17:18 2018 -0700

----------------------------------------------------------------------
 src/main/asciidoc/_chapters/appendix_hfile_format.adoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/65e85c4f/src/main/asciidoc/_chapters/appendix_hfile_format.adoc
----------------------------------------------------------------------
diff --git a/src/main/asciidoc/_chapters/appendix_hfile_format.adoc b/src/main/asciidoc/_chapters/appendix_hfile_format.adoc
index 18eafe6..20f46d3 100644
--- a/src/main/asciidoc/_chapters/appendix_hfile_format.adoc
+++ b/src/main/asciidoc/_chapters/appendix_hfile_format.adoc
@@ -94,7 +94,7 @@ The version of HBase introducing the above features reads both version 1 and 2 H
 A version 2 HFile is structured as follows:
 
 .HFile Version 2 Structure
-image:hfilev2.png[HFile Version 2]
+image::hfilev2.png[HFile Version 2]
 
 Unified version 2 block format
[2/2] hbase git commit: HBASE-20348 Add HTrace to upgrade notes
HBASE-20348 Add HTrace to upgrade notes

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8adf1bb2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8adf1bb2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8adf1bb2

Branch: refs/heads/master
Commit: 8adf1bb2b478cb79fc709ece6325c20b673c7e41
Parents: e2b0490
Author: Mike Drob
Authored: Wed Apr 4 12:23:55 2018 -0700
Committer: Mike Drob
Committed: Thu Apr 5 10:13:23 2018 -0700

----------------------------------------------------------------------
 src/main/asciidoc/_chapters/tracing.adoc   | 6 ++++--
 src/main/asciidoc/_chapters/upgrading.adoc | 8 ++++++++
 2 files changed, 12 insertions(+), 2 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/8adf1bb2/src/main/asciidoc/_chapters/tracing.adoc
----------------------------------------------------------------------
diff --git a/src/main/asciidoc/_chapters/tracing.adoc b/src/main/asciidoc/_chapters/tracing.adoc
index 8bd1962..7305aa8 100644
--- a/src/main/asciidoc/_chapters/tracing.adoc
+++ b/src/main/asciidoc/_chapters/tracing.adoc
@@ -30,8 +30,10 @@
 :icons: font
 :experimental:
 
-link:https://issues.apache.org/jira/browse/HBASE-6449[HBASE-6449] added support for tracing requests through HBase, using the open source tracing library, link:https://htrace.incubator.apache.org/[HTrace].
-Setting up tracing is quite simple, however it currently requires some very minor changes to your client code (it would not be very difficult to remove this requirement).
+HBase includes facilities for tracing requests using the open source tracing library, link:https://htrace.incubator.apache.org/[Apache HTrace].
+Setting up tracing is quite simple, however it currently requires some very minor changes to your client code (this requirement may be removed in the future).
+
+Support for this feature using HTrace 3 in HBase was added in link:https://issues.apache.org/jira/browse/HBASE-6449[HBASE-6449]. Starting with HBase 2.0, there was a non-compatible update to HTrace 4 via link:https://issues.apache.org/jira/browse/HBASE-18601[HBASE-18601]. The examples provided in this section will be using HTrace 4 package names, syntax, and conventions. For older examples, please consult previous versions of this guide.
 
 [[tracing.spanreceivers]]
 === SpanReceivers

http://git-wip-us.apache.org/repos/asf/hbase/blob/8adf1bb2/src/main/asciidoc/_chapters/upgrading.adoc
----------------------------------------------------------------------
diff --git a/src/main/asciidoc/_chapters/upgrading.adoc b/src/main/asciidoc/_chapters/upgrading.adoc
index f5cdff3..31589d7 100644
--- a/src/main/asciidoc/_chapters/upgrading.adoc
+++ b/src/main/asciidoc/_chapters/upgrading.adoc
@@ -543,6 +543,14 @@ A number of internal dependencies for HBase were updated or removed from the run
 .Multiple breaking changes to source and binary compatibility for client API
 The Java client API for HBase has a number of changes that break both source and binary compatibility; for details see the Compatibility Check Report for the release you'll be upgrading to.
 
+[[upgrade2.0.tracing]]
+.Tracing implementation changes
+The backing implementation of HBase's tracing features was updated from Apache HTrace 3 to HTrace 4, which includes several breaking changes. While HTrace 3 and 4 can coexist in the same runtime, they will not integrate with each other, leading to disjoint trace information.
+
+The internal changes to HBase during this upgrade were sufficient for compilation, but it has not been confirmed that there are no regressions in tracing functionality. Please consider this feature experimental for the immediate future.
+
+If you previously relied on client side tracing integrated with HBase operations, it is recommended that you upgrade your usage to HTrace 4 as well.
+
 This would be a good place to link to an appendix on migrating applications
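The upgrade note above directs HTrace 3 users to move their client code to HTrace 4. Purely as orientation, the sketch below shows roughly what a client-side span around an HBase call looks like in that style; it is a hedged sketch assuming the htrace-core4 API (org.apache.htrace.core.Tracer / TraceScope) and standard HBase 2.0 client classes, not code taken from this commit, and the table name is a placeholder.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.htrace.core.TraceScope;
import org.apache.htrace.core.Tracer;

public class TracedGetSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // HTrace 4 style: spans are created from an explicit Tracer instance.
    Tracer tracer = new Tracer.Builder("TracedGetSketch").build();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("test_table"));
         // Open a client-side span around the HBase call; closing the scope ends the span.
         TraceScope scope = tracer.newScope("get-one-row")) {
      Result result = table.get(new Get(Bytes.toBytes("row1")));
      System.out.println("cells returned: " + result.size());
    }
  }
}

A SpanReceiver still has to be configured (see the tracing chapter) for the spans to be collected anywhere; without one the scope is effectively a no-op.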
[1/2] hbase git commit: HBASE-20346 Update how to run shell tests in ref guide
Repository: hbase
Updated Branches:
  refs/heads/master e2b0490d1 -> f27819a9a

HBASE-20346 Update how to run shell tests in ref guide

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f27819a9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f27819a9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f27819a9

Branch: refs/heads/master
Commit: f27819a9aea2d856adba540cc6dee7a1ee77c854
Parents: 8adf1bb
Author: Mike Drob
Authored: Wed Apr 4 14:23:56 2018 -0700
Committer: Mike Drob
Committed: Thu Apr 5 10:13:23 2018 -0700

----------------------------------------------------------------------
 src/main/asciidoc/_chapters/developer.adoc | 18 ++++++++++++------
 1 file changed, 12 insertions(+), 6 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/f27819a9/src/main/asciidoc/_chapters/developer.adoc
----------------------------------------------------------------------
diff --git a/src/main/asciidoc/_chapters/developer.adoc b/src/main/asciidoc/_chapters/developer.adoc
index c95f210..a6e9c3e 100644
--- a/src/main/asciidoc/_chapters/developer.adoc
+++ b/src/main/asciidoc/_chapters/developer.adoc
@@ -906,13 +906,21 @@ For any other module, for example `hbase-common`, the tests must be strict unit
 
 Testing the HBase Shell
 The HBase shell and its tests are predominantly written in jruby.
-In order to make these tests run as a part of the standard build, there is a single JUnit test, `TestShell`, that takes care of loading the jruby implemented tests and running them.
+
+In order to make these tests run as a part of the standard build, there are a few JUnit test classes that take care of loading the jruby implemented tests and running them.
+The tests were split into separate classes to accommodate class level timeouts (see <> for specifics).
 You can run all of these tests from the top level with:
 
 [source,bourne]
+
+  mvn clean test -Dtest=Test*Shell
+
-  mvn clean test -Dtest=TestShell
+If you have previously done a `mvn install`, then you can instruct maven to run only the tests in the hbase-shell module with:
+
+[source,bourne]
+
+  mvn clean test -pl hbase-shell
 
 Alternatively, you may limit the shell tests that run using the system variable `shell.test`.
@@ -921,8 +929,7 @@ For example, the tests that cover the shell commands for altering tables are con
 
 [source,bourne]
 
-
-  mvn clean test -Dtest=TestShell -Dshell.test=/AdminAlterTableTest/
+  mvn clean test -pl hbase-shell -Dshell.test=/AdminAlterTableTest/
 
 You may also use a link:http://docs.ruby-doc.com/docs/ProgrammingRuby/html/language.html#UJ[Ruby Regular Expression
@@ -932,14 +939,13 @@ You can run all of the HBase admin related tests, including both the normal admi
 
 [source,bourne]
 
-  mvn clean test -Dtest=TestShell -Dshell.test=/.*Admin.*Test/
+  mvn clean test -pl hbase-shell -Dshell.test=/.*Admin.*Test/
 
 In the event of a test failure, you can see details by examining the XML version of the surefire report results
 
 [source,bourne]
 
-  vim hbase-shell/target/surefire-reports/TEST-org.apache.hadoop.hbase.client.TestShell.xml
[36/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/apidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html b/apidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html
index 6b01ff4..76c6859 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html
[Regenerated source listing for org.apache.hadoop.hbase.client.RowMutations; the visible change is renumbering of the embedded source. The listed class performs multiple mutations atomically on a single row (currently Put and Delete, applied in the order added) and exposes the static of(List<? extends Mutation>) factory, byte[]-row constructors, add(Mutation) and add(List<? extends Mutation>), plus the deprecated add(Put), add(Delete), compareTo(Row), equals, and hashCode members.]
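Since the regenerated page above is the RowMutations listing, a short usage sketch of that public API may help orient readers. It is a hedged example, not taken from the commit: the table, column family, and qualifier names are placeholders, and Table.mutateRow is the standard client entry point for applying a RowMutations atomically.

import java.util.Arrays;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class RowMutationsSketch {
  public static void main(String[] args) throws Exception {
    byte[] row = Bytes.toBytes("row1");
    // Build a Put and a Delete against the same row; RowMutations.of verifies the rows match.
    Put put = new Put(row).addColumn(Bytes.toBytes("cf"), Bytes.toBytes("a"), Bytes.toBytes("v"));
    Delete delete = new Delete(row).addColumns(Bytes.toBytes("cf"), Bytes.toBytes("b"));
    RowMutations mutations = RowMutations.of(Arrays.asList(put, delete));

    try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Table table = connection.getTable(TableName.valueOf("test_table"))) {
      // The mutations are applied atomically, in the order they were added.
      table.mutateRow(mutations);
    }
  }
}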
[18/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
index 5b7c419..7137829 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
[Regenerated source listing for HRegion; the visible change is only renumbering of the embedded import block.]
[38/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/apidocs/org/apache/hadoop/hbase/client/RowMutations.html
----------------------------------------------------------------------
diff --git a/apidocs/org/apache/hadoop/hbase/client/RowMutations.html b/apidocs/org/apache/hadoop/hbase/client/RowMutations.html
index 8958b78..7142b04 100644
--- a/apidocs/org/apache/hadoop/hbase/client/RowMutations.html
+++ b/apidocs/org/apache/hadoop/hbase/client/RowMutations.html
[Regenerated Javadoc page for RowMutations; the change is refreshed source anchors for the constructors, of(List<? extends Mutation>), the deprecated add(Put) and add(Delete), add(Mutation), add(List<? extends Mutation>), the deprecated compareTo(Row), equals, and hashCode, and getRow.]
[39/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/apache_hbase_reference_guide.pdf
----------------------------------------------------------------------
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index d691d6b..f07986c 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
[Regenerated PDF; only the CreationDate/ModDate (D:20180404144554 -> D:20180405144504) and internal page-tree object references changed.]
[22/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html
index 3165a6c..b6817d9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html
[Regenerated source listing for SyncTable. The embedded source adds two job configuration keys and fields alongside the existing dry-run switch:]

   static final String DRY_RUN_CONF_KEY = "sync.table.dry.run";
   static final String DO_DELETES_CONF_KEY = "sync.table.do.deletes";
   static final String DO_PUTS_CONF_KEY = "sync.table.do.puts";
   ...
   boolean dryRun;
   boolean doDeletes = true;
   boolean doPuts = true;

[The remainder of the page is the renumbered createSubmittableJob body, which reads the source hash manifest, validates the hash-file and partition-key counts, and configures the SyncMapper job against the target table.]
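The new sync.table.do.deletes and sync.table.do.puts keys sit alongside sync.table.dry.run as plain boolean job configuration entries with defaults of true. A hedged sketch of how a driver program might set them before submitting a SyncTable job follows; the surrounding class is a placeholder and does not reflect the tool's actual argument parsing.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class SyncTableConfSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Report differences without writing anything to the target table.
    conf.setBoolean("sync.table.dry.run", true);
    // Allow missing-row/cell repairs via Puts, but suppress Deletes on the target.
    conf.setBoolean("sync.table.do.puts", true);
    conf.setBoolean("sync.table.do.deletes", false);

    System.out.println("dryRun=" + conf.getBoolean("sync.table.dry.run", false)
        + " doPuts=" + conf.getBoolean("sync.table.do.puts", true)
        + " doDeletes=" + conf.getBoolean("sync.table.do.deletes", true));
  }
}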
[16/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
index 5b7c419..7137829 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
[Same regenerated HRegion source listing as above; only the embedded import block is renumbered.]
[21/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.html
index 3165a6c..b6817d9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.html
[Same regenerated SyncTable source listing as above, adding the sync.table.do.deletes and sync.table.do.puts configuration keys and the corresponding doDeletes/doPuts fields.]
[27/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index a4ab1b7..f6fc79b 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
[Regenerated package tree; the ProtobufLogReader.WALHdrResult and RingBufferTruck.Type enum entries are reordered.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationPeerStorage.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationPeerStorage.html b/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationPeerStorage.html
index 40dff0f..4fcd6e2 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationPeerStorage.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationPeerStorage.html
[Regenerated Javadoc; refreshed source anchors for isPeerEnabled(String peerId) and getPeerConfig(String peerId), both of which throw ReplicationException.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html b/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
index 33b09d9..90fd656 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
[Regenerated Javadoc for the ZK-based replication queue storage (@InterfaceAudience.Private); refreshed anchors for LOG and the ZOOKEEPER_ZNODE_REPLICATION_HFILE_REFS_KEY, ZOOKEEPER_ZNODE_REPLICATION_HFILE_REFS_DEFAULT, and ZOOKEEPER_ZNODE_REPLICATION_REGIONS_KEY constants.]
[15/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
index ebbde54..7d1dba6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
[Regenerated source listing for RSRpcServices; only the embedded import block is renumbered.]
[04/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html index 8c0d57c..e606e82 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html @@ -46,2582 +46,2583 @@ 038import java.util.Iterator; 039import java.util.List; 040 -041import org.apache.hadoop.hbase.Cell; -042import org.apache.hadoop.hbase.CellComparator; -043import org.apache.hadoop.hbase.KeyValue; -044import org.apache.hadoop.io.RawComparator; -045import org.apache.hadoop.io.WritableComparator; -046import org.apache.hadoop.io.WritableUtils; -047import org.apache.yetus.audience.InterfaceAudience; -048import org.slf4j.Logger; -049import org.slf4j.LoggerFactory; -050 -051import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; -052 -053import com.google.protobuf.ByteString; -054 -055import sun.misc.Unsafe; -056 -057/** -058 * Utility class that handles byte arrays, conversions to/from other types, -059 * comparisons, hash code generation, manufacturing keys for HashMaps or -060 * HashSets, and can be used as key in maps or trees. -061 */ -062@SuppressWarnings("restriction") -063@InterfaceAudience.Public -064@edu.umd.cs.findbugs.annotations.SuppressWarnings( -065 value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", -066justification="It has been like this forever") -067public class Bytes implements ComparableBytes { -068 -069 // Using the charset canonical name for String/byte[] conversions is much -070 // more efficient due to use of cached encoders/decoders. -071 private static final String UTF8_CSN = StandardCharsets.UTF_8.name(); -072 -073 //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed -074 private static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; -075 -076 private static final Logger LOG = LoggerFactory.getLogger(Bytes.class); -077 -078 /** -079 * Size of boolean in bytes -080 */ -081 public static final int SIZEOF_BOOLEAN = Byte.SIZE / Byte.SIZE; -082 -083 /** -084 * Size of byte in bytes -085 */ -086 public static final int SIZEOF_BYTE = SIZEOF_BOOLEAN; -087 -088 /** -089 * Size of char in bytes -090 */ -091 public static final int SIZEOF_CHAR = Character.SIZE / Byte.SIZE; -092 -093 /** -094 * Size of double in bytes -095 */ -096 public static final int SIZEOF_DOUBLE = Double.SIZE / Byte.SIZE; -097 -098 /** -099 * Size of float in bytes -100 */ -101 public static final int SIZEOF_FLOAT = Float.SIZE / Byte.SIZE; -102 -103 /** -104 * Size of int in bytes -105 */ -106 public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE; -107 -108 /** -109 * Size of long in bytes -110 */ -111 public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE; -112 -113 /** -114 * Size of short in bytes -115 */ -116 public static final int SIZEOF_SHORT = Short.SIZE / Byte.SIZE; -117 -118 /** -119 * Mask to apply to a long to reveal the lower int only. Use like this: -120 * int i = (int)(0xL ^ some_long_value); -121 */ -122 public static final long MASK_FOR_LOWER_INT_IN_LONG = 0xL; -123 -124 /** -125 * Estimate of size cost to pay beyond payload in jvm for instance of byte []. -126 * Estimate based on study of jhat and jprofiler numbers. -127 */ -128 // JHat says BU is 56 bytes. -129 // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?) 
-130 public static final int ESTIMATED_HEAP_TAX = 16; -131 -132 private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned(); -133 -134 /** -135 * Returns length of the byte array, returning 0 if the array is null. -136 * Useful for calculating sizes. -137 * @param b byte array, which can be null -138 * @return 0 if b is null, otherwise returns length -139 */ -140 final public static int len(byte[] b) { -141return b == null ? 0 : b.length; -142 } -143 -144 private byte[] bytes; -145 private int offset; -146 private int length; -147 -148 /** -149 * Create a zero-size sequence. -150 */ -151 public Bytes() { -152super(); -153 } -154 -155 /** -156 * Create a Bytes using the byte array as the initial value. -157 * @param bytes This array becomes the backing storage for the object. -158 */ -159 public Bytes(byte[] bytes) { -160this(bytes, 0, bytes.length); -161 } -162 -163 /** -164 * Set the new Bytes to the contents of the passed -165 * codeibw/code. -166 * @param ibw the value to set this Bytes to. -167 */ -168 public Bytes(final Bytes ibw) { -169this(ibw.get(), ibw.getOffset(), ibw.getLength()); -170 } -171 -172 /** -173 * Set the value to a given byte range -174 * @param bytes the new
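For readers scanning the regenerated Bytes page above, a short usage sketch of members visible in that hunk (SIZEOF_INT, SIZEOF_LONG and the null-safe len helper); the buffer sizing is illustrative only and not part of this commit:

  import org.apache.hadoop.hbase.util.Bytes;

  public class BytesSizingExample {
    public static void main(String[] args) {
      // Size a buffer for one int followed by one long using the SIZEOF_* constants.
      byte[] buf = new byte[Bytes.SIZEOF_INT + Bytes.SIZEOF_LONG]; // 4 + 8 = 12 bytes
      // len() is null-safe: it returns 0 for a null array instead of throwing.
      System.out.println(Bytes.len(null)); // 0
      System.out.println(Bytes.len(buf));  // 12
    }
  }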
[06/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html index 8c0d57c..e606e82 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html @@ -46,2582 +46,2583 @@
[02/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html -- diff --git a/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html index 3168ee3..e159b3f 100644 --- a/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html +++ b/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html @@ -145,8 +145,8 @@ java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or interface in java.lang">EnumE (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableT, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true; title="class or interface in java.io">Serializable) -org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithFailures.FailurePhase org.apache.hadoop.hbase.backup.TestBackupDeleteWithFailures.Failure +org.apache.hadoop.hbase.backup.TestIncrementalBackupMergeWithFailures.FailurePhase http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/testdevapidocs/org/apache/hadoop/hbase/mapreduce/TestSyncTable.html -- diff --git a/testdevapidocs/org/apache/hadoop/hbase/mapreduce/TestSyncTable.html b/testdevapidocs/org/apache/hadoop/hbase/mapreduce/TestSyncTable.html index 015f1cb..65262ce 100644 --- a/testdevapidocs/org/apache/hadoop/hbase/mapreduce/TestSyncTable.html +++ b/testdevapidocs/org/apache/hadoop/hbase/mapreduce/TestSyncTable.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":9,"i1":10,"i2":9,"i3":9,"i4":10,"i5":10,"i6":10,"i7":10}; +var methods = {"i0":9,"i1":10,"i2":10,"i3":10,"i4":9,"i5":9,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10}; var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -189,30 +189,51 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html org.apache.hadoop.hbase.TableNametargetTableName) +private void +assertTargetDoDeletesFalse(intexpectedRows, + org.apache.hadoop.hbase.TableNamesourceTableName, + org.apache.hadoop.hbase.TableNametargetTableName) + + +private void +assertTargetDoPutsFalse(intexpectedRows, + org.apache.hadoop.hbase.TableNamesourceTableName, + org.apache.hadoop.hbase.TableNametargetTableName) + + static void beforeClass() - + private static byte[][] generateSplits(intnumRows, intnumRegions) - + private void hashSourceTable(org.apache.hadoop.hbase.TableNamesourceTableName, org.apache.hadoop.fs.PathtestDir) - + private org.apache.hadoop.mapreduce.Counters -syncTables(org.apache.hadoop.hbase.TableNamesourceTableName, +syncTables(org.apache.hadoop.hbase.TableNamesourceTableName, org.apache.hadoop.hbase.TableNametargetTableName, - org.apache.hadoop.fs.PathtestDir) + org.apache.hadoop.fs.PathtestDir, + https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String...options) - + void testSyncTable() - + +void +testSyncTableDoDeletesFalse() + + +void +testSyncTableDoPutsFalse() + + private void writeTestData(org.apache.hadoop.hbase.TableNamesourceTableName, org.apache.hadoop.hbase.TableNametargetTableName) @@ -334,7 +355,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html generateSplits -private staticbyte[][]generateSplits(intnumRows, +private 
staticbyte[][]generateSplits(intnumRows, intnumRegions) @@ -344,7 +365,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html testSyncTable -publicvoidtestSyncTable() +publicvoidtestSyncTable() throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true; title="class or interface in java.lang">Exception Throws: @@ -352,13 +373,41 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html + + + + + +testSyncTableDoDeletesFalse +publicvoidtestSyncTableDoDeletesFalse() + throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true; title="class or interface in java.lang">Exception + +Throws: +https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true; title="class or interface in java.lang">Exception + + + + + + + + +testSyncTableDoPutsFalse +publicvoidtestSyncTableDoPutsFalse() +
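The widened syncTables(sourceTableName, targetTableName, testDir, String... options) helper above lets each test append extra SyncTable arguments. A rough sketch of how one of the new tests might drive it; the flag string, table names, TEST_UTIL field and expected row count are assumptions, only the helper and assert methods come from the method table above:

  // Hypothetical test body, assuming a conventional HBaseTestingUtility field named TEST_UTIL.
  @Test
  public void testSyncTableDoDeletesFalse() throws Exception {
    TableName source = TableName.valueOf("testSyncTableDoDeletesFalse_source");
    TableName target = TableName.valueOf("testSyncTableDoDeletesFalse_target");
    Path testDir = TEST_UTIL.getDataTestDirOnTestFS("testSyncTableDoDeletesFalse");

    writeTestData(source, target);
    hashSourceTable(source, testDir);
    // Assumed flag, passed through the new varargs parameter to the SyncTable job.
    Counters counters = syncTables(source, target, testDir, "--doDeletes=false");
    assertTargetDoDeletesFalse(100, source, target); // expected row count is illustrative
  }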
[19/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html index 5b7c419..7137829 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html @@ -78,93 +78,93 @@ 070import java.util.concurrent.locks.ReadWriteLock; 071import java.util.concurrent.locks.ReentrantReadWriteLock; 072import java.util.function.Function; -073import org.apache.hadoop.conf.Configuration; -074import org.apache.hadoop.fs.FileStatus; -075import org.apache.hadoop.fs.FileSystem; -076import org.apache.hadoop.fs.LocatedFileStatus; -077import org.apache.hadoop.fs.Path; -078import org.apache.hadoop.hbase.Cell; -079import org.apache.hadoop.hbase.CellBuilderType; -080import org.apache.hadoop.hbase.CellComparator; -081import org.apache.hadoop.hbase.CellComparatorImpl; -082import org.apache.hadoop.hbase.CellScanner; -083import org.apache.hadoop.hbase.CellUtil; -084import org.apache.hadoop.hbase.CompareOperator; -085import org.apache.hadoop.hbase.CompoundConfiguration; -086import org.apache.hadoop.hbase.DoNotRetryIOException; -087import org.apache.hadoop.hbase.DroppedSnapshotException; -088import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; -089import org.apache.hadoop.hbase.HConstants; -090import org.apache.hadoop.hbase.HConstants.OperationStatusCode; -091import org.apache.hadoop.hbase.HDFSBlocksDistribution; -092import org.apache.hadoop.hbase.KeyValue; -093import org.apache.hadoop.hbase.KeyValueUtil; -094import org.apache.hadoop.hbase.NamespaceDescriptor; -095import org.apache.hadoop.hbase.NotServingRegionException; -096import org.apache.hadoop.hbase.PrivateCellUtil; -097import org.apache.hadoop.hbase.RegionTooBusyException; -098import org.apache.hadoop.hbase.TableName; -099import org.apache.hadoop.hbase.Tag; -100import org.apache.hadoop.hbase.TagUtil; -101import org.apache.hadoop.hbase.UnknownScannerException; -102import org.apache.hadoop.hbase.client.Append; -103import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -104import org.apache.hadoop.hbase.client.CompactionState; -105import org.apache.hadoop.hbase.client.Delete; -106import org.apache.hadoop.hbase.client.Durability; -107import org.apache.hadoop.hbase.client.Get; -108import org.apache.hadoop.hbase.client.Increment; -109import org.apache.hadoop.hbase.client.IsolationLevel; -110import org.apache.hadoop.hbase.client.Mutation; -111import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor; -112import org.apache.hadoop.hbase.client.Put; -113import org.apache.hadoop.hbase.client.RegionInfo; -114import org.apache.hadoop.hbase.client.RegionInfoBuilder; -115import org.apache.hadoop.hbase.client.RegionReplicaUtil; -116import org.apache.hadoop.hbase.client.Result; -117import org.apache.hadoop.hbase.client.RowMutations; -118import org.apache.hadoop.hbase.client.Scan; -119import org.apache.hadoop.hbase.client.TableDescriptor; -120import org.apache.hadoop.hbase.client.TableDescriptorBuilder; -121import org.apache.hadoop.hbase.conf.ConfigurationManager; -122import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver; -123import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType; -124import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare; -125import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException; -126import org.apache.hadoop.hbase.exceptions.TimeoutIOException; -127import org.apache.hadoop.hbase.exceptions.UnknownProtocolException; -128import org.apache.hadoop.hbase.filter.ByteArrayComparable; -129import org.apache.hadoop.hbase.filter.FilterWrapper; -130import org.apache.hadoop.hbase.filter.IncompatibleFilterException; -131import org.apache.hadoop.hbase.io.HFileLink; -132import org.apache.hadoop.hbase.io.HeapSize; -133import org.apache.hadoop.hbase.io.TimeRange; -134import org.apache.hadoop.hbase.io.hfile.HFile; -135import org.apache.hadoop.hbase.ipc.CallerDisconnectedException; -136import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; -137import org.apache.hadoop.hbase.ipc.RpcCall; -138import org.apache.hadoop.hbase.ipc.RpcServer; -139import org.apache.hadoop.hbase.monitoring.MonitoredTask; -140import org.apache.hadoop.hbase.monitoring.TaskMonitor; -141import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager; -142import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry; -143import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope; -144import
[20/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.html index 3165a6c..b6817d9 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.html @@ -71,728 +71,754 @@ 063 static final String TARGET_TABLE_CONF_KEY = "sync.table.target.table.name"; 064 static final String SOURCE_ZK_CLUSTER_CONF_KEY = "sync.table.source.zk.cluster"; 065 static final String TARGET_ZK_CLUSTER_CONF_KEY = "sync.table.target.zk.cluster"; -066 static final String DRY_RUN_CONF_KEY="sync.table.dry.run"; -067 -068 Path sourceHashDir; -069 String sourceTableName; -070 String targetTableName; -071 -072 String sourceZkCluster; -073 String targetZkCluster; -074 boolean dryRun; -075 -076 Counters counters; -077 -078 public SyncTable(Configuration conf) { -079super(conf); -080 } +066 static final String DRY_RUN_CONF_KEY = "sync.table.dry.run"; +067 static final String DO_DELETES_CONF_KEY = "sync.table.do.deletes"; +068 static final String DO_PUTS_CONF_KEY = "sync.table.do.puts"; +069 +070 Path sourceHashDir; +071 String sourceTableName; +072 String targetTableName; +073 +074 String sourceZkCluster; +075 String targetZkCluster; +076 boolean dryRun; +077 boolean doDeletes = true; +078 boolean doPuts = true; +079 +080 Counters counters; 081 -082 public Job createSubmittableJob(String[] args) throws IOException { -083FileSystem fs = sourceHashDir.getFileSystem(getConf()); -084if (!fs.exists(sourceHashDir)) { -085 throw new IOException("Source hash dir not found: " + sourceHashDir); -086} -087 -088HashTable.TableHash tableHash = HashTable.TableHash.read(getConf(), sourceHashDir); -089LOG.info("Read source hash manifest: " + tableHash); -090LOG.info("Read " + tableHash.partitions.size() + " partition keys"); -091if (!tableHash.tableName.equals(sourceTableName)) { -092 LOG.warn("Table name mismatch - manifest indicates hash was taken from: " -093 + tableHash.tableName + " but job is reading from: " + sourceTableName); -094} -095if (tableHash.numHashFiles != tableHash.partitions.size() + 1) { -096 throw new RuntimeException("Hash data appears corrupt. The number of of hash files created" -097 + " should be 1 more than the number of partition keys. However, the manifest file " -098 + " says numHashFiles=" + tableHash.numHashFiles + " but the number of partition keys" -099 + " found in the partitions file is " + tableHash.partitions.size()); -100} -101 -102Path dataDir = new Path(sourceHashDir, HashTable.HASH_DATA_DIR); -103int dataSubdirCount = 0; -104for (FileStatus file : fs.listStatus(dataDir)) { -105 if (file.getPath().getName().startsWith(HashTable.OUTPUT_DATA_FILE_PREFIX)) { -106dataSubdirCount++; -107 } -108} -109 -110if (dataSubdirCount != tableHash.numHashFiles) { -111 throw new RuntimeException("Hash data appears corrupt. The number of of hash files created" -112 + " should be 1 more than the number of partition keys. 
However, the number of data dirs" -113 + " found is " + dataSubdirCount + " but the number of partition keys" -114 + " found in the partitions file is " + tableHash.partitions.size()); -115} -116 -117Job job = Job.getInstance(getConf(),getConf().get("mapreduce.job.name", -118"syncTable_" + sourceTableName + "-" + targetTableName)); -119Configuration jobConf = job.getConfiguration(); -120job.setJarByClass(HashTable.class); -121jobConf.set(SOURCE_HASH_DIR_CONF_KEY, sourceHashDir.toString()); -122jobConf.set(SOURCE_TABLE_CONF_KEY, sourceTableName); -123jobConf.set(TARGET_TABLE_CONF_KEY, targetTableName); -124if (sourceZkCluster != null) { -125 jobConf.set(SOURCE_ZK_CLUSTER_CONF_KEY, sourceZkCluster); -126} -127if (targetZkCluster != null) { -128 jobConf.set(TARGET_ZK_CLUSTER_CONF_KEY, targetZkCluster); -129} -130jobConf.setBoolean(DRY_RUN_CONF_KEY, dryRun); -131 -132 TableMapReduceUtil.initTableMapperJob(targetTableName, tableHash.initScan(), -133SyncMapper.class, null, null, job); -134 -135job.setNumReduceTasks(0); -136 -137if (dryRun) { -138 job.setOutputFormatClass(NullOutputFormat.class); -139} else { -140 // No reducers. Just write straight to table. Call initTableReducerJob -141 // because it sets up the TableOutputFormat. -142 TableMapReduceUtil.initTableReducerJob(targetTableName, null, job, null, -143 targetZkCluster, null, null); -144 -145 // would be nice to add an
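The hunk above introduces DO_DELETES_CONF_KEY ("sync.table.do.deletes") and DO_PUTS_CONF_KEY ("sync.table.do.puts") alongside the existing dry-run key, with the doDeletes and doPuts fields defaulting to true. A minimal sketch of propagating them onto the job configuration in the same style createSubmittableJob uses for DRY_RUN_CONF_KEY; the helper method itself is illustrative and not part of the published page:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.mapreduce.Job;

  final class SyncTableFlags {
    // Mirrors how the driver pushes boolean flags to the SyncMapper via the job conf.
    static void apply(Job job, boolean dryRun, boolean doDeletes, boolean doPuts) {
      Configuration jobConf = job.getConfiguration();
      jobConf.setBoolean("sync.table.dry.run", dryRun);
      jobConf.setBoolean("sync.table.do.deletes", doDeletes); // new; defaults to true
      jobConf.setBoolean("sync.table.do.puts", doPuts);       // new; defaults to true
    }
  }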
[40/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb. Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/6c67ddd7 Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/6c67ddd7 Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/6c67ddd7 Branch: refs/heads/asf-site Commit: 6c67ddd76a595184cfeb816456c75b30a3773139 Parents: a0fbd6a Author: jenkinsAuthored: Thu Apr 5 14:46:49 2018 + Committer: jenkins Committed: Thu Apr 5 14:46:49 2018 + -- acid-semantics.html | 6 +- apache_hbase_reference_guide.pdf| 33810 + .../hadoop/hbase/client/RowMutations.html |28 +- .../hbase/util/Bytes.ByteArrayComparator.html | 8 +- .../hbase/util/Bytes.RowEndKeyComparator.html | 8 +- apidocs/org/apache/hadoop/hbase/util/Bytes.html | 300 +- .../hadoop/hbase/client/RowMutations.html | 317 +- .../hbase/util/Bytes.ByteArrayComparator.html | 5147 +-- .../hbase/util/Bytes.RowEndKeyComparator.html | 5147 +-- .../org/apache/hadoop/hbase/util/Bytes.html | 5147 +-- book.html | 121 +- bulk-loads.html | 6 +- checkstyle-aggregate.html | 11668 +++--- checkstyle.rss | 8 +- coc.html| 6 +- dependencies.html | 6 +- dependency-convergence.html | 6 +- dependency-info.html| 6 +- dependency-management.html | 6 +- devapidocs/constant-values.html |20 +- devapidocs/index-all.html |40 +- .../hadoop/hbase/backup/package-tree.html | 4 +- .../hadoop/hbase/client/RowMutations.html |32 +- .../hadoop/hbase/client/package-tree.html |22 +- .../hadoop/hbase/filter/package-tree.html | 8 +- .../hadoop/hbase/io/hfile/package-tree.html | 6 +- .../apache/hadoop/hbase/ipc/package-tree.html | 2 +- .../SyncTable.SyncMapper.CellScanner.html |18 +- .../mapreduce/SyncTable.SyncMapper.Counter.html |34 +- .../hbase/mapreduce/SyncTable.SyncMapper.html |84 +- .../hadoop/hbase/mapreduce/SyncTable.html |88 +- .../hadoop/hbase/mapreduce/package-tree.html| 4 +- .../hbase/master/balancer/package-tree.html | 2 +- .../hadoop/hbase/master/package-tree.html | 6 +- .../hbase/master/procedure/package-tree.html| 2 +- .../org/apache/hadoop/hbase/package-tree.html |16 +- .../hadoop/hbase/procedure2/package-tree.html | 4 +- .../hadoop/hbase/quotas/package-tree.html | 6 +- .../hadoop/hbase/regionserver/StoreScanner.html | 172 +- .../hadoop/hbase/regionserver/package-tree.html |16 +- .../regionserver/querymatcher/package-tree.html | 4 +- .../hbase/regionserver/wal/package-tree.html| 2 +- .../replication/ZKReplicationPeerStorage.html | 4 +- .../replication/ZKReplicationQueueStorage.html |96 +- .../replication/regionserver/package-tree.html | 2 +- .../hadoop/hbase/rest/model/package-tree.html | 2 +- .../hbase/security/access/package-tree.html | 2 +- .../hadoop/hbase/security/package-tree.html | 2 +- .../hadoop/hbase/thrift/package-tree.html | 2 +- .../hbase/util/Bytes.ByteArrayComparator.html | 8 +- .../hadoop/hbase/util/Bytes.Comparer.html | 4 +- ...raphicalComparerHolder.PureJavaComparer.html |10 +- ...ographicalComparerHolder.UnsafeComparer.html |12 +- .../Bytes.LexicographicalComparerHolder.html|10 +- .../hbase/util/Bytes.RowEndKeyComparator.html | 8 +- .../org/apache/hadoop/hbase/util/Bytes.html | 336 +- .../CollectionUtils.IOExceptionSupplier.html| 4 +- .../hadoop/hbase/util/CollectionUtils.html | 212 +- .../apache/hadoop/hbase/util/package-tree.html | 8 +- .../org/apache/hadoop/hbase/Version.html| 6 +- .../hadoop/hbase/client/RowMutations.html | 317 +- .../SyncTable.SyncMapper.CellScanner.html | 1444 +- .../mapreduce/SyncTable.SyncMapper.Counter.html | 1444 +- 
.../hbase/mapreduce/SyncTable.SyncMapper.html | 1444 +- .../hadoop/hbase/mapreduce/SyncTable.html | 1444 +- .../HRegion.BatchOperation.Visitor.html | 174 +- .../regionserver/HRegion.BatchOperation.html| 174 +- .../regionserver/HRegion.BulkLoadListener.html | 174 +- .../HRegion.FlushResult.Result.html | 174 +- .../hbase/regionserver/HRegion.FlushResult.html | 174 +- .../regionserver/HRegion.FlushResultImpl.html | 174 +- .../HRegion.MutationBatchOperation.html |
[33/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html index 8c0d57c..e606e82 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html @@ -46,2582 +46,2583 @@
hbase-site git commit: INFRA-10751 Empty commit
Repository: hbase-site Updated Branches: refs/heads/asf-site 6c67ddd76 -> 37f65af2b INFRA-10751 Empty commit Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/37f65af2 Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/37f65af2 Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/37f65af2 Branch: refs/heads/asf-site Commit: 37f65af2b9e980b2ed3aee4ecedb53857618c1f8 Parents: 6c67ddd Author: jenkins Authored: Thu Apr 5 14:47:27 2018 + Committer: jenkins Committed: Thu Apr 5 14:47:27 2018 + -- --
[25/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/org/apache/hadoop/hbase/util/Bytes.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/util/Bytes.html b/devapidocs/org/apache/hadoop/hbase/util/Bytes.html index 99240f2..9dd868b 100644 --- a/devapidocs/org/apache/hadoop/hbase/util/Bytes.html +++ b/devapidocs/org/apache/hadoop/hbase/util/Bytes.html @@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public class Bytes +public class Bytes extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableBytes Utility class that handles byte arrays, conversions to/from other types, @@ -1293,7 +1293,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab UTF8_CSN -private static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String UTF8_CSN +private static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String UTF8_CSN @@ -1302,7 +1302,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab EMPTY_BYTE_ARRAY -private static finalbyte[] EMPTY_BYTE_ARRAY +private static finalbyte[] EMPTY_BYTE_ARRAY @@ -1311,7 +1311,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab LOG -private static finalorg.slf4j.Logger LOG +private static finalorg.slf4j.Logger LOG @@ -1320,7 +1320,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_BOOLEAN -public static finalint SIZEOF_BOOLEAN +public static finalint SIZEOF_BOOLEAN Size of boolean in bytes See Also: @@ -1334,7 +1334,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_BYTE -public static finalint SIZEOF_BYTE +public static finalint SIZEOF_BYTE Size of byte in bytes See Also: @@ -1348,7 +1348,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_CHAR -public static finalint SIZEOF_CHAR +public static finalint SIZEOF_CHAR Size of char in bytes See Also: @@ -1362,7 +1362,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_DOUBLE -public static finalint SIZEOF_DOUBLE +public static finalint SIZEOF_DOUBLE Size of double in bytes See Also: @@ -1376,7 +1376,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_FLOAT -public static finalint SIZEOF_FLOAT +public static finalint SIZEOF_FLOAT Size of float in bytes See Also: @@ -1390,7 +1390,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_INT -public static finalint SIZEOF_INT +public static finalint SIZEOF_INT Size of int in bytes See Also: @@ -1404,7 +1404,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_LONG -public static finalint SIZEOF_LONG +public static finalint SIZEOF_LONG Size of long in bytes See Also: @@ -1418,7 +1418,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_SHORT -public static finalint SIZEOF_SHORT +public static finalint SIZEOF_SHORT Size of short in bytes See Also: @@ -1432,7 +1432,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab MASK_FOR_LOWER_INT_IN_LONG -public static finallong MASK_FOR_LOWER_INT_IN_LONG 
+public static finallong MASK_FOR_LOWER_INT_IN_LONG Mask to apply to a long to reveal the lower int only. Use like this: int i = (int)(0xL ^ some_long_value); @@ -1447,7 +1447,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab ESTIMATED_HEAP_TAX -public static finalint ESTIMATED_HEAP_TAX +public static finalint ESTIMATED_HEAP_TAX Estimate of size cost to pay beyond payload in jvm for instance of byte []. Estimate based on study of jhat and jprofiler numbers. @@ -1462,7 +1462,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab UNSAFE_UNALIGNED -private static finalboolean UNSAFE_UNALIGNED +private static finalboolean UNSAFE_UNALIGNED @@ -1471,7 +1471,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab bytes -privatebyte[] bytes +privatebyte[] bytes @@ -1480,7 +1480,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab offset -privateint offset +privateint offset @@ -1489,7 +1489,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab length -privateint length +privateint length @@ -1498,7 +1498,7 @@ implements
[08/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html index 8c0d57c..e606e82 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html @@ -46,2582 +46,2583 @@
[34/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html index 8c0d57c..e606e82 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html @@ -46,2582 +46,2583 @@
[37/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/apidocs/org/apache/hadoop/hbase/util/Bytes.html -- diff --git a/apidocs/org/apache/hadoop/hbase/util/Bytes.html b/apidocs/org/apache/hadoop/hbase/util/Bytes.html index 4c30b84..b240e4c 100644 --- a/apidocs/org/apache/hadoop/hbase/util/Bytes.html +++ b/apidocs/org/apache/hadoop/hbase/util/Bytes.html @@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Public -public class Bytes +public class Bytes extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableBytes Utility class that handles byte arrays, conversions to/from other types, @@ -1204,7 +1204,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_BOOLEAN -public static finalint SIZEOF_BOOLEAN +public static finalint SIZEOF_BOOLEAN Size of boolean in bytes See Also: @@ -1218,7 +1218,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_BYTE -public static finalint SIZEOF_BYTE +public static finalint SIZEOF_BYTE Size of byte in bytes See Also: @@ -1232,7 +1232,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_CHAR -public static finalint SIZEOF_CHAR +public static finalint SIZEOF_CHAR Size of char in bytes See Also: @@ -1246,7 +1246,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_DOUBLE -public static finalint SIZEOF_DOUBLE +public static finalint SIZEOF_DOUBLE Size of double in bytes See Also: @@ -1260,7 +1260,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_FLOAT -public static finalint SIZEOF_FLOAT +public static finalint SIZEOF_FLOAT Size of float in bytes See Also: @@ -1274,7 +1274,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_INT -public static finalint SIZEOF_INT +public static finalint SIZEOF_INT Size of int in bytes See Also: @@ -1288,7 +1288,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_LONG -public static finalint SIZEOF_LONG +public static finalint SIZEOF_LONG Size of long in bytes See Also: @@ -1302,7 +1302,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab SIZEOF_SHORT -public static finalint SIZEOF_SHORT +public static finalint SIZEOF_SHORT Size of short in bytes See Also: @@ -1316,7 +1316,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab MASK_FOR_LOWER_INT_IN_LONG -public static finallong MASK_FOR_LOWER_INT_IN_LONG +public static finallong MASK_FOR_LOWER_INT_IN_LONG Mask to apply to a long to reveal the lower int only. Use like this: int i = (int)(0xL ^ some_long_value); @@ -1331,7 +1331,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab ESTIMATED_HEAP_TAX -public static finalint ESTIMATED_HEAP_TAX +public static finalint ESTIMATED_HEAP_TAX Estimate of size cost to pay beyond payload in jvm for instance of byte []. Estimate based on study of jhat and jprofiler numbers. 
@@ -1346,7 +1346,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab BYTES_COMPARATOR -public static finalhttps://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true; title="class or interface in java.util">Comparatorbyte[] BYTES_COMPARATOR +public static finalhttps://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true; title="class or interface in java.util">Comparatorbyte[] BYTES_COMPARATOR Pass this to TreeMaps where byte [] are keys. @@ -1356,7 +1356,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab BYTES_RAWCOMPARATOR -public static finalorg.apache.hadoop.io.RawComparatorbyte[] BYTES_RAWCOMPARATOR +public static finalorg.apache.hadoop.io.RawComparatorbyte[] BYTES_RAWCOMPARATOR Use comparing byte arrays, byte-by-byte @@ -1374,7 +1374,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab Bytes -publicBytes() +publicBytes() Create a zero-size sequence. @@ -1384,7 +1384,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab Bytes -publicBytes(byte[]bytes) +publicBytes(byte[]bytes) Create a Bytes using the byte array as the initial value. Parameters: @@ -1398,7 +1398,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab Bytes -publicBytes(Bytesibw) +publicBytes(Bytesibw) Set the new Bytes to the contents of the passed ibw. @@ -1413,7 +1413,7 @@ implements https://docs.oracle.com/javase/8/docs/api/java/lang/Comparab Bytes
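BYTES_COMPARATOR above is documented as the comparator to hand to TreeMaps keyed on byte[]. A small illustrative sketch (the row keys and values are made up):

  import java.util.TreeMap;
  import org.apache.hadoop.hbase.util.Bytes;

  public class BytesComparatorExample {
    public static void main(String[] args) {
      // Keys compare lexicographically byte-by-byte under BYTES_COMPARATOR.
      TreeMap<byte[], String> rows = new TreeMap<>(Bytes.BYTES_COMPARATOR);
      rows.put(Bytes.toBytes("row-b"), "second");
      rows.put(Bytes.toBytes("row-a"), "first");
      rows.keySet().forEach(k -> System.out.println(Bytes.toString(k))); // row-a, then row-b
    }
  }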
[09/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html index 8c0d57c..e606e82 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html @@ -46,2582 +46,2583 @@
[35/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html -- diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html index 8c0d57c..e606e82 100644 --- a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html +++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html @@ -46,2582 +46,2583 @@ 038import java.util.Iterator; 039import java.util.List; 040 -041import org.apache.hadoop.hbase.Cell; -042import org.apache.hadoop.hbase.CellComparator; -043import org.apache.hadoop.hbase.KeyValue; -044import org.apache.hadoop.io.RawComparator; -045import org.apache.hadoop.io.WritableComparator; -046import org.apache.hadoop.io.WritableUtils; -047import org.apache.yetus.audience.InterfaceAudience; -048import org.slf4j.Logger; -049import org.slf4j.LoggerFactory; -050 -051import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; -052 -053import com.google.protobuf.ByteString; -054 -055import sun.misc.Unsafe; -056 -057/** -058 * Utility class that handles byte arrays, conversions to/from other types, -059 * comparisons, hash code generation, manufacturing keys for HashMaps or -060 * HashSets, and can be used as key in maps or trees. -061 */ -062@SuppressWarnings("restriction") -063@InterfaceAudience.Public -064@edu.umd.cs.findbugs.annotations.SuppressWarnings( -065 value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", -066justification="It has been like this forever") -067public class Bytes implements ComparableBytes { -068 -069 // Using the charset canonical name for String/byte[] conversions is much -070 // more efficient due to use of cached encoders/decoders. -071 private static final String UTF8_CSN = StandardCharsets.UTF_8.name(); -072 -073 //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed -074 private static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; -075 -076 private static final Logger LOG = LoggerFactory.getLogger(Bytes.class); -077 -078 /** -079 * Size of boolean in bytes -080 */ -081 public static final int SIZEOF_BOOLEAN = Byte.SIZE / Byte.SIZE; -082 -083 /** -084 * Size of byte in bytes -085 */ -086 public static final int SIZEOF_BYTE = SIZEOF_BOOLEAN; -087 -088 /** -089 * Size of char in bytes -090 */ -091 public static final int SIZEOF_CHAR = Character.SIZE / Byte.SIZE; -092 -093 /** -094 * Size of double in bytes -095 */ -096 public static final int SIZEOF_DOUBLE = Double.SIZE / Byte.SIZE; -097 -098 /** -099 * Size of float in bytes -100 */ -101 public static final int SIZEOF_FLOAT = Float.SIZE / Byte.SIZE; -102 -103 /** -104 * Size of int in bytes -105 */ -106 public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE; -107 -108 /** -109 * Size of long in bytes -110 */ -111 public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE; -112 -113 /** -114 * Size of short in bytes -115 */ -116 public static final int SIZEOF_SHORT = Short.SIZE / Byte.SIZE; -117 -118 /** -119 * Mask to apply to a long to reveal the lower int only. Use like this: -120 * int i = (int)(0xL ^ some_long_value); -121 */ -122 public static final long MASK_FOR_LOWER_INT_IN_LONG = 0xL; -123 -124 /** -125 * Estimate of size cost to pay beyond payload in jvm for instance of byte []. -126 * Estimate based on study of jhat and jprofiler numbers. -127 */ -128 // JHat says BU is 56 bytes. 
-129 // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?) -130 public static final int ESTIMATED_HEAP_TAX = 16; -131 -132 private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned(); -133 -134 /** -135 * Returns length of the byte array, returning 0 if the array is null. -136 * Useful for calculating sizes. -137 * @param b byte array, which can be null -138 * @return 0 if b is null, otherwise returns length -139 */ -140 final public static int len(byte[] b) { -141return b == null ? 0 : b.length; -142 } -143 -144 private byte[] bytes; -145 private int offset; -146 private int length; -147 -148 /** -149 * Create a zero-size sequence. -150 */ -151 public Bytes() { -152super(); -153 } -154 -155 /** -156 * Create a Bytes using the byte array as the initial value. -157 * @param bytes This array becomes the backing storage for the object. -158 */ -159 public Bytes(byte[] bytes) { -160this(bytes, 0, bytes.length); -161 } -162 -163 /** -164 * Set the new Bytes to the contents of the passed -165 * codeibw/code. -166 * @param ibw the value to set this Bytes to. -167 */ -168 public Bytes(final Bytes ibw) { -169this(ibw.get(), ibw.getOffset(), ibw.getLength()); -170 }
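The page diffed above documents Bytes.ByteArrayComparator; as a brief, hedged sketch of its common use case, here is ordering raw byte[] keys in a sorted collection, assuming the Bytes.BYTES_COMPARATOR constant (an instance of this comparator) and illustrative row-key values.

import java.util.TreeMap;
import org.apache.hadoop.hbase.util.Bytes;

public class ByteKeyedMap {
  public static void main(String[] args) {
    // byte[] uses identity equals/hashCode, so sorted byte[] keys need an explicit comparator.
    TreeMap<byte[], String> rows = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    rows.put(Bytes.toBytes("row-2"), "second");
    rows.put(Bytes.toBytes("row-1"), "first");
    // Keys come back in lexicographic byte order: row-1, then row-2.
    rows.keySet().forEach(key -> System.out.println(Bytes.toString(key)));
  }
}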
[29/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html index f4299f1..26da61e 100644 --- a/devapidocs/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html +++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html @@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab"; -public static enum SyncTable.SyncMapper.Counter +public static enum SyncTable.SyncMapper.Counter extends https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or interface in java.lang">EnumSyncTable.SyncMapper.Counter @@ -246,7 +246,7 @@ the order they are declared. BATCHES -public static finalSyncTable.SyncMapper.Counter BATCHES +public static finalSyncTable.SyncMapper.Counter BATCHES @@ -255,7 +255,7 @@ the order they are declared. HASHES_MATCHED -public static finalSyncTable.SyncMapper.Counter HASHES_MATCHED +public static finalSyncTable.SyncMapper.Counter HASHES_MATCHED @@ -264,7 +264,7 @@ the order they are declared. HASHES_NOT_MATCHED -public static finalSyncTable.SyncMapper.Counter HASHES_NOT_MATCHED +public static finalSyncTable.SyncMapper.Counter HASHES_NOT_MATCHED @@ -273,7 +273,7 @@ the order they are declared. SOURCEMISSINGROWS -public static finalSyncTable.SyncMapper.Counter SOURCEMISSINGROWS +public static finalSyncTable.SyncMapper.Counter SOURCEMISSINGROWS @@ -282,7 +282,7 @@ the order they are declared. SOURCEMISSINGCELLS -public static finalSyncTable.SyncMapper.Counter SOURCEMISSINGCELLS +public static finalSyncTable.SyncMapper.Counter SOURCEMISSINGCELLS @@ -291,7 +291,7 @@ the order they are declared. TARGETMISSINGROWS -public static finalSyncTable.SyncMapper.Counter TARGETMISSINGROWS +public static finalSyncTable.SyncMapper.Counter TARGETMISSINGROWS @@ -300,7 +300,7 @@ the order they are declared. TARGETMISSINGCELLS -public static finalSyncTable.SyncMapper.Counter TARGETMISSINGCELLS +public static finalSyncTable.SyncMapper.Counter TARGETMISSINGCELLS @@ -309,7 +309,7 @@ the order they are declared. ROWSWITHDIFFS -public static finalSyncTable.SyncMapper.Counter ROWSWITHDIFFS +public static finalSyncTable.SyncMapper.Counter ROWSWITHDIFFS @@ -318,7 +318,7 @@ the order they are declared. DIFFERENTCELLVALUES -public static finalSyncTable.SyncMapper.Counter DIFFERENTCELLVALUES +public static finalSyncTable.SyncMapper.Counter DIFFERENTCELLVALUES @@ -327,7 +327,7 @@ the order they are declared. MATCHINGROWS -public static finalSyncTable.SyncMapper.Counter MATCHINGROWS +public static finalSyncTable.SyncMapper.Counter MATCHINGROWS @@ -336,7 +336,7 @@ the order they are declared. MATCHINGCELLS -public static finalSyncTable.SyncMapper.Counter MATCHINGCELLS +public static finalSyncTable.SyncMapper.Counter MATCHINGCELLS @@ -345,7 +345,7 @@ the order they are declared. EMPTY_BATCHES -public static finalSyncTable.SyncMapper.Counter EMPTY_BATCHES +public static finalSyncTable.SyncMapper.Counter EMPTY_BATCHES @@ -354,7 +354,7 @@ the order they are declared. RANGESMATCHED -public static finalSyncTable.SyncMapper.Counter RANGESMATCHED +public static finalSyncTable.SyncMapper.Counter RANGESMATCHED @@ -363,7 +363,7 @@ the order they are declared. 
RANGESNOTMATCHED -public static final SyncTable.SyncMapper.Counter RANGESNOTMATCHED +public static final SyncTable.SyncMapper.Counter RANGESNOTMATCHED @@ -380,7 +380,7 @@ the order they are declared. values -public static SyncTable.SyncMapper.Counter[] values() +public static SyncTable.SyncMapper.Counter[] values() Returns an array containing the constants of this enum type, in the order they are declared. This method may be used to iterate over the constants as follows: @@ -400,7 +400,7 @@ for (SyncTable.SyncMapper.Counter c : SyncTable.SyncMapper.Counter.values()) valueOf -public static SyncTable.SyncMapper.Counter valueOf(String name) +public static SyncTable.SyncMapper.Counter valueOf(String name) Returns the enum constant of this type with the specified name. The string must match exactly an identifier used to declare an enum constant in this type. (Extraneous whitespace characters are http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.html
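The SyncTable.SyncMapper.Counter enum documented above is how the job reports its work; here is a small sketch of reading those counters from a finished MapReduce job. printSyncCounters is a hypothetical helper and the job handle is assumed to come from the caller.

import org.apache.hadoop.hbase.mapreduce.SyncTable;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;

public class SyncTableCounterReport {
  // Hypothetical helper: dump every SyncTable counter from a completed job.
  static void printSyncCounters(Job completedJob) throws Exception {
    Counters counters = completedJob.getCounters();
    for (SyncTable.SyncMapper.Counter c : SyncTable.SyncMapper.Counter.values()) {
      System.out.println(c.name() + " = " + counters.findCounter(c).getValue());
    }
  }
}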
[28/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/org/apache/hadoop/hbase/regionserver/StoreScanner.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreScanner.html b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreScanner.html index 2b5360c..7370b94 100644 --- a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreScanner.html +++ b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreScanner.html @@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public class StoreScanner +public class StoreScanner extends NonReversedNonLazyKeyValueScanner implements KeyValueScanner, InternalScanner, ChangedReadersObserver Scanner scans both the memstore and the Store. Coalesce KeyValue stream into ListKeyValue @@ -695,7 +695,7 @@ implements LOG -private static finalorg.slf4j.Logger LOG +private static finalorg.slf4j.Logger LOG @@ -704,7 +704,7 @@ implements store -protected finalHStore store +protected finalHStore store @@ -713,7 +713,7 @@ implements comparator -private finalCellComparator comparator +private finalCellComparator comparator @@ -722,7 +722,7 @@ implements matcher -privateScanQueryMatcher matcher +privateScanQueryMatcher matcher @@ -731,7 +731,7 @@ implements heap -protectedKeyValueHeap heap +protectedKeyValueHeap heap @@ -740,7 +740,7 @@ implements cacheBlocks -privateboolean cacheBlocks +privateboolean cacheBlocks @@ -749,7 +749,7 @@ implements countPerRow -privatelong countPerRow +privatelong countPerRow @@ -758,7 +758,7 @@ implements storeLimit -privateint storeLimit +privateint storeLimit @@ -767,7 +767,7 @@ implements storeOffset -privateint storeOffset +privateint storeOffset @@ -776,7 +776,7 @@ implements closing -privateboolean closing +privateboolean closing @@ -785,7 +785,7 @@ implements get -private finalboolean get +private finalboolean get @@ -794,7 +794,7 @@ implements explicitColumnQuery -private finalboolean explicitColumnQuery +private finalboolean explicitColumnQuery @@ -803,7 +803,7 @@ implements useRowColBloom -private finalboolean useRowColBloom +private finalboolean useRowColBloom @@ -812,7 +812,7 @@ implements parallelSeekEnabled -privateboolean parallelSeekEnabled +privateboolean parallelSeekEnabled A flag that enables StoreFileScanner parallel-seeking @@ -822,7 +822,7 @@ implements executor -privateExecutorService executor +privateExecutorService executor @@ -831,7 +831,7 @@ implements scan -private finalScan scan +private finalScan scan @@ -840,7 +840,7 @@ implements oldestUnexpiredTS -private finallong oldestUnexpiredTS +private finallong oldestUnexpiredTS @@ -849,7 +849,7 @@ implements now -private finallong now +private finallong now @@ -858,7 +858,7 @@ implements minVersions -private finalint minVersions +private finalint minVersions @@ -867,7 +867,7 @@ implements maxRowSize -private finallong maxRowSize +private finallong maxRowSize @@ -876,7 +876,7 @@ implements cellsPerHeartbeatCheck -private finallong cellsPerHeartbeatCheck +private finallong cellsPerHeartbeatCheck @@ -885,7 +885,7 @@ implements scannersForDelayedClose -private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListKeyValueScanner scannersForDelayedClose +private finalhttps://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListKeyValueScanner scannersForDelayedClose @@ -894,7 +894,7 @@ implements kvsScanned -privatelong kvsScanned +privatelong kvsScanned The number 
of KVs seen by the scanner. Includes explicitly skipped KVs, but not KVs skipped via seeking to next row/column. TODO: estimate them? @@ -905,7 +905,7 @@ implements prevCell -privateCell prevCell +privateCell prevCell @@ -914,7 +914,7 @@ implements preadMaxBytes -private finallong preadMaxBytes +private finallong preadMaxBytes @@ -923,7 +923,7 @@ implements bytesRead -privatelong bytesRead +privatelong bytesRead @@ -932,7 +932,7 @@ implements LAZY_SEEK_ENABLED_BY_DEFAULT -static finalboolean LAZY_SEEK_ENABLED_BY_DEFAULT +static finalboolean LAZY_SEEK_ENABLED_BY_DEFAULT We don't ever expect to change this, the constant is just for clarity. See Also: @@ -946,7 +946,7 @@ implements STORESCANNER_PARALLEL_SEEK_ENABLE -public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String STORESCANNER_PARALLEL_SEEK_ENABLE +public static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String STORESCANNER_PARALLEL_SEEK_ENABLE
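The StoreScanner fields above include the parallelSeekEnabled flag and its STORESCANNER_PARALLEL_SEEK_ENABLE configuration key; here is a hedged sketch of flipping that switch programmatically. The literal key string "hbase.storescanner.parallel.seek.enable" is an assumption (the generated page truncates the value), and in practice the setting would normally live in hbase-site.xml.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class ParallelSeekConfig {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Assumed key string for StoreScanner.STORESCANNER_PARALLEL_SEEK_ENABLE.
    conf.setBoolean("hbase.storescanner.parallel.seek.enable", true);
    System.out.println(conf.getBoolean("hbase.storescanner.parallel.seek.enable", false));
  }
}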
[31/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/checkstyle-aggregate.html -- diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html index 8131ea5..72162d8 100644 --- a/checkstyle-aggregate.html +++ b/checkstyle-aggregate.html @@ -7,7 +7,7 @@ - + Apache HBase Checkstyle Results @@ -61,7 +61,7 @@ License -http://www.apache.org/dyn/closer.cgi/hbase/; title="Downloads">Downloads +http://www.apache.org/dyn/closer.lua/hbase/; title="Downloads">Downloads https://issues.apache.org/jira/browse/HBASE?report=com.atlassian.jira.plugin.system.project:changelog-panel#selectedTab=com.atlassian.jira.plugin.system.project%3Achangelog-panel; title="Release Notes">Release Notes @@ -274,7 +274,7 @@ 3600 0 0 -15913 +15910 Files @@ -4527,7 +4527,7 @@ org/apache/hadoop/hbase/mapreduce/TestSyncTable.java 0 0 -4 +3 org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java 0 @@ -6972,7 +6972,7 @@ org/apache/hadoop/hbase/regionserver/StoreScanner.java 0 0 -24 +23 org/apache/hadoop/hbase/regionserver/StoreUtils.java 0 @@ -9367,7 +9367,7 @@ org/apache/hadoop/hbase/util/CollectionUtils.java 0 0 -5 +4 org/apache/hadoop/hbase/util/CommonFSUtils.java 0 @@ -10282,7 +10282,7 @@ sortStaticImportsAlphabetically: true groups: *,org.apache.hbase.thirdparty,org.apache.hadoop.hbase.shaded option: top -1225 +1224 Error @@ -10305,19 +10305,19 @@ caseIndent: 2 basicOffset: 2 lineWrappingIndentation: 2 -4662 +4661 Error javadoc http://checkstyle.sourceforge.net/config_javadoc.html#JavadocTagContinuationIndentation;>JavadocTagContinuationIndentation offset: 2 -798 +784 Error http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription;>NonEmptyAtclauseDescription -3833 +3846 Error misc @@ -14526,7 +14526,7 @@ Error javadoc -JavadocTagContinuationIndentation +NonEmptyAtclauseDescription Javadoc comment at column 26 has parse error. Missed HTML close tag 'arg'. Sometimes it means that close tag missed for one of previous tags. 44 @@ -15162,7 +15162,7 @@ Error javadoc -JavadocTagContinuationIndentation +NonEmptyAtclauseDescription Javadoc comment at column 4 has parse error. Missed HTML close tag 'pre'. Sometimes it means that close tag missed for one of previous tags. 59 @@ -16917,7 +16917,7 @@ Error javadoc -JavadocTagContinuationIndentation +NonEmptyAtclauseDescription Javadoc comment at column 19 has parse error. Details: no viable alternative at input 'code\n * ListFuture' while parsing HTML_TAG 167 @@ -19839,7 +19839,7 @@ Error javadoc -JavadocTagContinuationIndentation +NonEmptyAtclauseDescription Javadoc comment at column 2 has parse error. Unrecognized error from ANTLR parser: null 83 @@ -27473,7 +27473,7 @@ blocks NeedBraces 'if' construct must use '{}'s. -145 +146 org/apache/hadoop/hbase/client/RpcRetryingCaller.java @@ -33846,7 +33846,7 @@ Error javadoc -JavadocTagContinuationIndentation +NonEmptyAtclauseDescription Javadoc comment at column 37 has parse error. Details: no viable alternative at input 'ColumnFamily,' while parsing HTML_ELEMENT 29 @@ -50142,7 +50142,7 @@ Error javadoc -JavadocTagContinuationIndentation +NonEmptyAtclauseDescription Javadoc comment at column 24 has parse error. Details: no viable alternative at input 'key,' while parsing HTML_ELEMENT 25 @@ -53730,7 +53730,7 @@ Error javadoc -JavadocTagContinuationIndentation +NonEmptyAtclauseDescription Javadoc comment at column 2 has parse error. 
Unrecognized error from ANTLR parser: null 587 @@ -57560,25 +57560,25 @@ indentation Indentation 'method def' child have incorrect indentation level 8, expected level should be 6. -210 +220 Error indentation Indentation 'method def' child have incorrect indentation level 8, expected level should be 6. -211 +221 Error indentation Indentation 'method def' child have incorrect indentation level 8, expected level should be 6. -213 +223 Error indentation Indentation 'method def modifier' have incorrect indentation level 5, expected level should be 4. -584 +594 org/apache/hadoop/hbase/mapreduce/TableInputFormat.java @@ -59950,216 +59950,210 @@ Error indentation Indentation -'array initialization rcurly' have incorrect indentation level 8, expected level should be one of the following: 4, 6. -194 +'array initialization' child have incorrect indentation level 8, expected level should be 6. +442 Error indentation Indentation 'array initialization' child have incorrect indentation level 8, expected level should be 6. -208 +443 Error indentation Indentation 'array initialization' child have incorrect indentation level 8, expected level should be
[32/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/book.html -- diff --git a/book.html b/book.html index 0621ea8..7977239 100644 --- a/book.html +++ b/book.html @@ -485,7 +485,7 @@ See Java for information about supported JDK versions. Procedure: Download, Configure, and Start HBase in Standalone Mode -Choose a download site from this list of https://www.apache.org/dyn/closer.cgi/hbase/;>Apache Download Mirrors. +Choose a download site from this list of https://www.apache.org/dyn/closer.lua/hbase/;>Apache Download Mirrors. Click on the suggested top link. This will take you to a mirror of HBase Releases. Click on the folder named stable and then download the binary file that ends in .tar.gz to your local filesystem. @@ -6703,6 +6703,9 @@ Quitting... hbase.regionserver.region.split.policy is now SteppingSplitPolicy. Previously it was IncreasingToUpperBoundRegionSplitPolicy. + +replication.source.ratio is now 0.5. Previously it was 0.1. + @@ -6915,13 +6918,81 @@ Quitting... -13.1.2. Rolling Upgrade from 1.x to 2.x +13.1.2. Upgrading Coprocessors to 2.0 + +Coprocessors have changed substantially in 2.0 ranging from top level design changes in class +hierarchies to changed/removed methods, interfaces, etc. +(Parent jira: https://issues.apache.org/jira/browse/HBASE-18169;>HBASE-18169 Coprocessor fix +and cleanup before 2.0.0 release). Some of the reasons for such widespread changes: + + + + +Pass Interfaces instead of Implementations; e.g. TableDescriptor instead of HTableDescriptor and +Region instead of HRegion (https://issues.apache.org/jira/browse/HBASE-18241;>HBASE-18241 +Change client.Table and client.Admin to not use HTableDescriptor). + + +Design refactor so implementers need to fill out less boilerplate and so we can do more +compile-time checking (https://issues.apache.org/jira/browse/HBASE-17732;>HBASE-17732) + + +Purge Protocol Buffers from Coprocessor API +(https://issues.apache.org/jira/browse/HBASE-18859;>HBASE-18859, +https://issues.apache.org/jira/browse/HBASE-16769;>HBASE-16769, etc) + + +Cut back on what we expose to Coprocessors removing hooks on internals that were too private to +expose (for eg. https://issues.apache.org/jira/browse/HBASE-18453;>HBASE-18453 +CompactionRequest should not be exposed to user directly; +https://issues.apache.org/jira/browse/HBASE-18298;>HBASE-18298 RegionServerServices Interface +cleanup for CP expose; etc) + + + + +To use coprocessors in 2.0, they should be rebuilt against new API otherwise they will fail to +load and HBase processes will die. + + +Suggested order of changes to upgrade the coprocessors: + + + + +Directly implement observer interfaces instead of extending Base*Observer classes. Change +Foo extends BaseXXXObserver to Foo implements XXXObserver. +(https://issues.apache.org/jira/browse/HBASE-17312;>HBASE-17312). + + +Adapt to design change from Inheritence to Composition +(https://issues.apache.org/jira/browse/HBASE-17732;>HBASE-17732) by following +https://github.com/apache/hbase/blob/master/dev-support/design-docs/Coprocessor_Design_Improvements-Use_composition_instead_of_inheritance-HBASE-17732.adoc#migrating-existing-cps-to-new-design;>this +example. + + +getTable() has been removed from the CoprocessorEnvrionment, coprocessors should self-manage +Table instances. + + + + +Some examples of writing coprocessors with new API can be found in hbase-example module +https://github.com/apache/hbase/tree/branch-2.0/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example;>here . 
+ + +Lastly, if an api has been changed/removed that breaks you in an irreparable way, and if theres a +good justification to add it back, bring it our notice (mailto:d...@hbase.apache.org;>d...@hbase.apache.org). + + + +13.1.3. Rolling Upgrade from 1.x to 2.x There is no rolling upgrade from HBase 1.x+ to HBase 2.x+. In order to perform a zero downtime upgrade, you will need to run an additional cluster in parallel and handle failover in application logic. -13.1.3. Upgrade process from 1.x to 2.x +13.1.4. Upgrade process from 1.x to 2.x To upgrade an existing HBase 1.x cluster, you should: @@ -6931,6 +7002,9 @@ Quitting... Clean shutdown of existing 1.x cluster +Update coprocessors + + Upgrade Master roles first @@ -10043,18 +10117,33 @@ If you dont have time to build it both ways and compare, my advice would -45.4. Optimize on the Server Side for Low Latency +45.4. Optimize on the Server Side for Low Latency + +Skip the network for local blocks when the RegionServer goes to read from HDFS by exploiting HDFSs +https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-hdfs/ShortCircuitLocalReads.html;>Short-Circuit Local Reads facility. +Note how setup must be done both at the datanode and on the dfsclient ends of the connecitoni.e. at the RegionServer +and how both ends need to have
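The upgrade notes above boil down to changing "Foo extends BaseXXXObserver" to "Foo implements XXXObserver" and favoring composition over inheritance; here is a hedged sketch of what a migrated region observer can look like under the 2.0 coprocessor API referenced in those notes. The class name and the empty prePut body are illustrative only.

import java.io.IOException;
import java.util.Optional;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.wal.WALEdit;

// 1.x style: public class ExamplePutObserver extends BaseRegionObserver { ... }
// 2.x style: implement the observer interface and expose it via composition.
public class ExamplePutObserver implements RegionCoprocessor, RegionObserver {

  @Override
  public Optional<RegionObserver> getRegionObserver() {
    return Optional.of(this);   // composition: the framework asks the coprocessor for its observer
  }

  @Override
  public void prePut(ObserverContext<RegionCoprocessorEnvironment> ctx, Put put,
      WALEdit edit, Durability durability) throws IOException {
    // Illustrative only: inspect or veto the Put here.
  }
}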
[24/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/org/apache/hadoop/hbase/util/CollectionUtils.IOExceptionSupplier.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/util/CollectionUtils.IOExceptionSupplier.html b/devapidocs/org/apache/hadoop/hbase/util/CollectionUtils.IOExceptionSupplier.html index b05dc5f..54e562f 100644 --- a/devapidocs/org/apache/hadoop/hbase/util/CollectionUtils.IOExceptionSupplier.html +++ b/devapidocs/org/apache/hadoop/hbase/util/CollectionUtils.IOExceptionSupplier.html @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; https://docs.oracle.com/javase/8/docs/api/java/lang/FunctionalInterface.html?is-external=true; title="class or interface in java.lang">@FunctionalInterface -public static interface CollectionUtils.IOExceptionSupplierV +public static interface CollectionUtils.IOExceptionSupplierV A supplier that throws IOException when get. @@ -155,7 +155,7 @@ public static interface get -Vget() +Vget() throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true; title="class or interface in java.io">IOException Throws: http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/org/apache/hadoop/hbase/util/CollectionUtils.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/util/CollectionUtils.html b/devapidocs/org/apache/hadoop/hbase/util/CollectionUtils.html index db3550d..e8df64e 100644 --- a/devapidocs/org/apache/hadoop/hbase/util/CollectionUtils.html +++ b/devapidocs/org/apache/hadoop/hbase/util/CollectionUtils.html @@ -18,7 +18,7 @@ catch(err) { } //--> -var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9}; +var methods = {"i0":9,"i1":9,"i2":9}; var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; @@ -75,13 +75,13 @@ var activeTableTab = "activeTableTab"; Summary: Nested| -Field| +Field| Constr| Method Detail: -Field| +Field| Constr| Method @@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab"; @InterfaceAudience.Private -public class CollectionUtils +public class CollectionUtils extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object Utility methods for dealing with Collections, including treating null collections as empty. @@ -140,25 +140,6 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html - - - - - -Field Summary - -Fields - -Modifier and Type -Field and Description - - -private static https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">Listhttps://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true; title="class or interface in java.lang">Object -EMPTY_LIST - - - - @@ -213,54 +194,6 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html value already exists. 
- -static TT -getFirst(https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">CollectionTcollection) -first/last - - - -static TT -getLast(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">ListTlist) - - -static int -getLastIndex(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">List?list) - - -static Tboolean -isEmpty(https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">CollectionTcollection) -empty - - - -static boolean -isLastIndex(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true; title="class or interface in java.util">List?list, - intindex) - - -static Tboolean -notEmpty(https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">CollectionTcollection) - - -static Thttps://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">CollectionT -nullSafe(https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">CollectionTin) - - -static A,Bboolean -nullSafeSameSize(https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">CollectionAa, -https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true; title="class or interface in java.util">CollectionBb) - - -static Tint
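CollectionUtils.IOExceptionSupplier, shown above, is a @FunctionalInterface whose get() may throw IOException, unlike java.util.function.Supplier; here is a minimal sketch of targeting it with a lambda, where the file path is hypothetical.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.hadoop.hbase.util.CollectionUtils;

public class IOExceptionSupplierExample {
  public static void main(String[] args) throws IOException {
    // A lambda works because IOExceptionSupplier is a functional interface.
    CollectionUtils.IOExceptionSupplier<byte[]> readBlob =
        () -> Files.readAllBytes(Paths.get("/tmp/example.bin"));   // hypothetical path
    byte[] data = readBlob.get();   // get() is allowed to throw IOException
    System.out.println(data.length);
  }
}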
[26/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html b/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html index ad9fa47..3d721e1 100644 --- a/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html +++ b/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html @@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab"; -static enum Bytes.LexicographicalComparerHolder.PureJavaComparer +static enum Bytes.LexicographicalComparerHolder.PureJavaComparer extends https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or interface in java.lang">EnumBytes.LexicographicalComparerHolder.PureJavaComparer implements Bytes.Comparerbyte[] @@ -217,7 +217,7 @@ the order they are declared. INSTANCE -public static finalBytes.LexicographicalComparerHolder.PureJavaComparer INSTANCE +public static finalBytes.LexicographicalComparerHolder.PureJavaComparer INSTANCE @@ -234,7 +234,7 @@ the order they are declared. values -public staticBytes.LexicographicalComparerHolder.PureJavaComparer[]values() +public staticBytes.LexicographicalComparerHolder.PureJavaComparer[]values() Returns an array containing the constants of this enum type, in the order they are declared. This method may be used to iterate over the constants as follows: @@ -254,7 +254,7 @@ for (Bytes.LexicographicalComparerHolder.PureJavaComparer c : Bytes.Lexicographi valueOf -public staticBytes.LexicographicalComparerHolder.PureJavaComparervalueOf(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) +public staticBytes.LexicographicalComparerHolder.PureJavaComparervalueOf(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) Returns the enum constant of this type with the specified name. The string must match exactly an identifier used to declare an enum constant in this type. (Extraneous whitespace characters are @@ -276,7 +276,7 @@ not permitted.) compareTo -publicintcompareTo(byte[]buffer1, +publicintcompareTo(byte[]buffer1, intoffset1, intlength1, byte[]buffer2, http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html b/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html index b549946..909bc09 100644 --- a/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html +++ b/devapidocs/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html @@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab"; -static enum Bytes.LexicographicalComparerHolder.UnsafeComparer +static enum Bytes.LexicographicalComparerHolder.UnsafeComparer extends https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or interface in java.lang">EnumBytes.LexicographicalComparerHolder.UnsafeComparer implements Bytes.Comparerbyte[] @@ -238,7 +238,7 @@ the order they are declared. 
INSTANCE -public static finalBytes.LexicographicalComparerHolder.UnsafeComparer INSTANCE +public static finalBytes.LexicographicalComparerHolder.UnsafeComparer INSTANCE @@ -255,7 +255,7 @@ the order they are declared. theUnsafe -static finalsun.misc.Unsafe theUnsafe +static finalsun.misc.Unsafe theUnsafe @@ -272,7 +272,7 @@ the order they are declared. values -public staticBytes.LexicographicalComparerHolder.UnsafeComparer[]values() +public staticBytes.LexicographicalComparerHolder.UnsafeComparer[]values() Returns an array containing the constants of this enum type, in the order they are declared. This method may be used to iterate over the constants as follows: @@ -292,7 +292,7 @@ for (Bytes.LexicographicalComparerHolder.UnsafeComparer c : Bytes.Lexicographica valueOf -public staticBytes.LexicographicalComparerHolder.UnsafeComparervalueOf(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringname) +public staticBytes.LexicographicalComparerHolder.UnsafeComparervalueOf(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
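PureJavaComparer and UnsafeComparer, diffed above, are the two strategies behind Bytes' lexicographic comparisons; here is a small sketch of the public entry point that delegates to them, assuming only Bytes.compareTo and illustrative values.

import org.apache.hadoop.hbase.util.Bytes;

public class LexicographicCompare {
  public static void main(String[] args) {
    byte[] a = Bytes.toBytes("apple");
    byte[] b = Bytes.toBytes("applesauce");

    // Bytes.compareTo delegates to UnsafeComparer when sun.misc.Unsafe is usable,
    // otherwise to PureJavaComparer.
    System.out.println(Bytes.compareTo(a, b));               // negative: "apple" sorts first
    System.out.println(Bytes.compareTo(a, 0, 5, b, 0, 5));   // 0: the first five bytes match
  }
}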
[13/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreScanner.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreScanner.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreScanner.html index 6e05959..4f63e3a 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreScanner.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreScanner.html @@ -35,1125 +35,1127 @@ 027import java.util.concurrent.CountDownLatch; 028import java.util.concurrent.locks.ReentrantLock; 029 -030import org.apache.hadoop.hbase.Cell; -031import org.apache.hadoop.hbase.CellComparator; -032import org.apache.hadoop.hbase.CellUtil; -033import org.apache.hadoop.hbase.DoNotRetryIOException; -034import org.apache.hadoop.hbase.HConstants; -035import org.apache.hadoop.hbase.PrivateCellUtil; -036import org.apache.hadoop.hbase.KeyValue; -037import org.apache.hadoop.hbase.KeyValueUtil; -038import org.apache.hadoop.hbase.client.IsolationLevel; -039import org.apache.hadoop.hbase.client.Scan; -040import org.apache.hadoop.hbase.executor.ExecutorService; -041import org.apache.hadoop.hbase.filter.Filter; -042import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope; -043import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState; -044import org.apache.hadoop.hbase.regionserver.handler.ParallelSeekHandler; -045import org.apache.hadoop.hbase.regionserver.querymatcher.CompactionScanQueryMatcher; -046import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher; -047import org.apache.hadoop.hbase.regionserver.querymatcher.UserScanQueryMatcher; -048import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; -049import org.apache.hadoop.hbase.util.CollectionUtils; -050import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -051import org.apache.yetus.audience.InterfaceAudience; +030import org.apache.commons.collections.CollectionUtils; +031import org.apache.hadoop.hbase.Cell; +032import org.apache.hadoop.hbase.CellComparator; +033import org.apache.hadoop.hbase.CellUtil; +034import org.apache.hadoop.hbase.DoNotRetryIOException; +035import org.apache.hadoop.hbase.HConstants; +036import org.apache.hadoop.hbase.PrivateCellUtil; +037import org.apache.hadoop.hbase.KeyValue; +038import org.apache.hadoop.hbase.KeyValueUtil; +039import org.apache.hadoop.hbase.client.IsolationLevel; +040import org.apache.hadoop.hbase.client.Scan; +041import org.apache.hadoop.hbase.executor.ExecutorService; +042import org.apache.hadoop.hbase.filter.Filter; +043import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope; +044import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState; +045import org.apache.hadoop.hbase.regionserver.handler.ParallelSeekHandler; +046import org.apache.hadoop.hbase.regionserver.querymatcher.CompactionScanQueryMatcher; +047import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher; +048import org.apache.hadoop.hbase.regionserver.querymatcher.UserScanQueryMatcher; +049import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +050import org.apache.yetus.audience.InterfaceAudience; +051 052import org.slf4j.Logger; 053import org.slf4j.LoggerFactory; -054import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; -055 -056/** -057 * Scanner scans both the memstore and the Store. Coalesce KeyValue stream into Listlt;KeyValuegt; -058 * for a single row. 
-059 * p -060 * The implementation is not thread safe. So there will be no race between next and close. The only -061 * exception is updateReaders, it will be called in the memstore flush thread to indicate that there -062 * is a flush. -063 */ -064@InterfaceAudience.Private -065public class StoreScanner extends NonReversedNonLazyKeyValueScanner -066implements KeyValueScanner, InternalScanner, ChangedReadersObserver { -067 private static final Logger LOG = LoggerFactory.getLogger(StoreScanner.class); -068 // In unit tests, the store could be null -069 protected final HStore store; -070 private final CellComparator comparator; -071 private ScanQueryMatcher matcher; -072 protected KeyValueHeap heap; -073 private boolean cacheBlocks; -074 -075 private long countPerRow = 0; -076 private int storeLimit = -1; -077 private int storeOffset = 0; -078 -079 // Used to indicate that the scanner has closed (see HBASE-1107) -080 // Do not need to be volatile because it's always accessed via synchronized methods -081 private boolean closing = false; -082 private final boolean get; -083 private final boolean explicitColumnQuery; -084 private final boolean useRowColBloom; -085 /** -086 * A flag that enables StoreFileScanner parallel-seeking -087 */ -088 private boolean parallelSeekEnabled = false; -089 private
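The StoreScanner diff above swaps the HBase-internal org.apache.hadoop.hbase.util.CollectionUtils import for Apache commons-collections' CollectionUtils. The diff itself only shows the import change, so as a hedged illustration, the null-safe emptiness check below is one representative helper such code typically relies on.

import java.util.Collections;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;

public class NullSafeEmptyCheck {
  public static void main(String[] args) {
    List<String> absent = null;
    List<String> empty = Collections.emptyList();

    // Both calls are null-safe, unlike calling isEmpty() on the list itself.
    System.out.println(CollectionUtils.isEmpty(absent));   // true
    System.out.println(CollectionUtils.isEmpty(empty));    // true
  }
}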
[23/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.CellScanner.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.CellScanner.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.CellScanner.html index 3165a6c..b6817d9 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.CellScanner.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.CellScanner.html @@ -71,728 +71,754 @@ 063 static final String TARGET_TABLE_CONF_KEY = "sync.table.target.table.name"; 064 static final String SOURCE_ZK_CLUSTER_CONF_KEY = "sync.table.source.zk.cluster"; 065 static final String TARGET_ZK_CLUSTER_CONF_KEY = "sync.table.target.zk.cluster"; -066 static final String DRY_RUN_CONF_KEY="sync.table.dry.run"; -067 -068 Path sourceHashDir; -069 String sourceTableName; -070 String targetTableName; -071 -072 String sourceZkCluster; -073 String targetZkCluster; -074 boolean dryRun; -075 -076 Counters counters; -077 -078 public SyncTable(Configuration conf) { -079super(conf); -080 } +066 static final String DRY_RUN_CONF_KEY = "sync.table.dry.run"; +067 static final String DO_DELETES_CONF_KEY = "sync.table.do.deletes"; +068 static final String DO_PUTS_CONF_KEY = "sync.table.do.puts"; +069 +070 Path sourceHashDir; +071 String sourceTableName; +072 String targetTableName; +073 +074 String sourceZkCluster; +075 String targetZkCluster; +076 boolean dryRun; +077 boolean doDeletes = true; +078 boolean doPuts = true; +079 +080 Counters counters; 081 -082 public Job createSubmittableJob(String[] args) throws IOException { -083FileSystem fs = sourceHashDir.getFileSystem(getConf()); -084if (!fs.exists(sourceHashDir)) { -085 throw new IOException("Source hash dir not found: " + sourceHashDir); -086} -087 -088HashTable.TableHash tableHash = HashTable.TableHash.read(getConf(), sourceHashDir); -089LOG.info("Read source hash manifest: " + tableHash); -090LOG.info("Read " + tableHash.partitions.size() + " partition keys"); -091if (!tableHash.tableName.equals(sourceTableName)) { -092 LOG.warn("Table name mismatch - manifest indicates hash was taken from: " -093 + tableHash.tableName + " but job is reading from: " + sourceTableName); -094} -095if (tableHash.numHashFiles != tableHash.partitions.size() + 1) { -096 throw new RuntimeException("Hash data appears corrupt. The number of of hash files created" -097 + " should be 1 more than the number of partition keys. However, the manifest file " -098 + " says numHashFiles=" + tableHash.numHashFiles + " but the number of partition keys" -099 + " found in the partitions file is " + tableHash.partitions.size()); -100} -101 -102Path dataDir = new Path(sourceHashDir, HashTable.HASH_DATA_DIR); -103int dataSubdirCount = 0; -104for (FileStatus file : fs.listStatus(dataDir)) { -105 if (file.getPath().getName().startsWith(HashTable.OUTPUT_DATA_FILE_PREFIX)) { -106dataSubdirCount++; -107 } -108} -109 -110if (dataSubdirCount != tableHash.numHashFiles) { -111 throw new RuntimeException("Hash data appears corrupt. The number of of hash files created" -112 + " should be 1 more than the number of partition keys. 
However, the number of data dirs" -113 + " found is " + dataSubdirCount + " but the number of partition keys" -114 + " found in the partitions file is " + tableHash.partitions.size()); -115} -116 -117Job job = Job.getInstance(getConf(),getConf().get("mapreduce.job.name", -118"syncTable_" + sourceTableName + "-" + targetTableName)); -119Configuration jobConf = job.getConfiguration(); -120job.setJarByClass(HashTable.class); -121jobConf.set(SOURCE_HASH_DIR_CONF_KEY, sourceHashDir.toString()); -122jobConf.set(SOURCE_TABLE_CONF_KEY, sourceTableName); -123jobConf.set(TARGET_TABLE_CONF_KEY, targetTableName); -124if (sourceZkCluster != null) { -125 jobConf.set(SOURCE_ZK_CLUSTER_CONF_KEY, sourceZkCluster); -126} -127if (targetZkCluster != null) { -128 jobConf.set(TARGET_ZK_CLUSTER_CONF_KEY, targetZkCluster); -129} -130jobConf.setBoolean(DRY_RUN_CONF_KEY, dryRun); -131 -132 TableMapReduceUtil.initTableMapperJob(targetTableName, tableHash.initScan(), -133SyncMapper.class, null, null, job); -134 -135job.setNumReduceTasks(0); -136 -137if (dryRun) { -138 job.setOutputFormatClass(NullOutputFormat.class); -139} else { -140 // No reducers. Just write straight to table. Call initTableReducerJob -141 // because it sets up the TableOutputFormat. -142
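The SyncTable source above adds sync.table.do.deletes and sync.table.do.puts alongside the existing dry-run key, with both new options defaulting to true; here is a hedged sketch of driving the tool with deletes disabled. The flag spellings, argument order, and paths are assumptions based on the conf keys in this diff.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.SyncTable;
import org.apache.hadoop.util.ToolRunner;

public class RunSyncTableWithoutDeletes {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Assumed flag spelling; sync.table.do.deletes=false means target rows are never deleted.
    int exitCode = ToolRunner.run(conf, new SyncTable(conf), new String[] {
        "--doDeletes=false",
        "hdfs:///hashes/sourceTable",   // hypothetical HashTable output dir
        "sourceTable",
        "targetTable"
    });
    System.exit(exitCode);
  }
}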
[10/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html index 8c0d57c..e606e82 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html @@ -46,2582 +46,2583 @@ 038import java.util.Iterator; 039import java.util.List; 040 -041import org.apache.hadoop.hbase.Cell; -042import org.apache.hadoop.hbase.CellComparator; -043import org.apache.hadoop.hbase.KeyValue; -044import org.apache.hadoop.io.RawComparator; -045import org.apache.hadoop.io.WritableComparator; -046import org.apache.hadoop.io.WritableUtils; -047import org.apache.yetus.audience.InterfaceAudience; -048import org.slf4j.Logger; -049import org.slf4j.LoggerFactory; -050 -051import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; -052 -053import com.google.protobuf.ByteString; -054 -055import sun.misc.Unsafe; -056 -057/** -058 * Utility class that handles byte arrays, conversions to/from other types, -059 * comparisons, hash code generation, manufacturing keys for HashMaps or -060 * HashSets, and can be used as key in maps or trees. -061 */ -062@SuppressWarnings("restriction") -063@InterfaceAudience.Public -064@edu.umd.cs.findbugs.annotations.SuppressWarnings( -065 value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", -066justification="It has been like this forever") -067public class Bytes implements ComparableBytes { -068 -069 // Using the charset canonical name for String/byte[] conversions is much -070 // more efficient due to use of cached encoders/decoders. -071 private static final String UTF8_CSN = StandardCharsets.UTF_8.name(); -072 -073 //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed -074 private static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; -075 -076 private static final Logger LOG = LoggerFactory.getLogger(Bytes.class); -077 -078 /** -079 * Size of boolean in bytes -080 */ -081 public static final int SIZEOF_BOOLEAN = Byte.SIZE / Byte.SIZE; -082 -083 /** -084 * Size of byte in bytes -085 */ -086 public static final int SIZEOF_BYTE = SIZEOF_BOOLEAN; -087 -088 /** -089 * Size of char in bytes -090 */ -091 public static final int SIZEOF_CHAR = Character.SIZE / Byte.SIZE; -092 -093 /** -094 * Size of double in bytes -095 */ -096 public static final int SIZEOF_DOUBLE = Double.SIZE / Byte.SIZE; -097 -098 /** -099 * Size of float in bytes -100 */ -101 public static final int SIZEOF_FLOAT = Float.SIZE / Byte.SIZE; -102 -103 /** -104 * Size of int in bytes -105 */ -106 public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE; -107 -108 /** -109 * Size of long in bytes -110 */ -111 public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE; -112 -113 /** -114 * Size of short in bytes -115 */ -116 public static final int SIZEOF_SHORT = Short.SIZE / Byte.SIZE; -117 -118 /** -119 * Mask to apply to a long to reveal the lower int only. Use like this: -120 * int i = (int)(0xL ^ some_long_value); -121 */ -122 public static final long MASK_FOR_LOWER_INT_IN_LONG = 0xL; -123 -124 /** -125 * Estimate of size cost to pay beyond payload in jvm for instance of byte []. -126 * Estimate based on study of jhat and jprofiler numbers. -127 */ -128 // JHat says BU is 56 bytes. 
-129 // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?) -130 public static final int ESTIMATED_HEAP_TAX = 16; -131 -132 private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned(); -133 -134 /** -135 * Returns length of the byte array, returning 0 if the array is null. -136 * Useful for calculating sizes. -137 * @param b byte array, which can be null -138 * @return 0 if b is null, otherwise returns length -139 */ -140 final public static int len(byte[] b) { -141return b == null ? 0 : b.length; -142 } -143 -144 private byte[] bytes; -145 private int offset; -146 private int length; -147 -148 /** -149 * Create a zero-size sequence. -150 */ -151 public Bytes() { -152super(); -153 } -154 -155 /** -156 * Create a Bytes using the byte array as the initial value. -157 * @param bytes This array becomes the backing storage for the object. -158 */ -159 public Bytes(byte[] bytes) { -160this(bytes, 0, bytes.length); -161 } -162 -163 /** -164 * Set the new Bytes to the contents of the passed -165 * codeibw/code. -166 * @param ibw the value to set this Bytes to. -167 */ -168 public Bytes(final Bytes ibw) { -169this(ibw.get(), ibw.getOffset(),
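Because this segment repeats the same Bytes source for another generated page, here is a different facet as a brief, hedged sketch: the UTF-8 String round-trip noted in the javadoc above and the null-safe len() helper. Values are illustrative.

import org.apache.hadoop.hbase.util.Bytes;

public class StringRoundTrip {
  public static void main(String[] args) {
    // String conversions use the cached UTF-8 encoder/decoder noted in the javadoc above.
    byte[] raw = Bytes.toBytes("naïve row key");
    System.out.println(Bytes.toString(raw));   // round-trips the UTF-8 text

    // len() is null-safe, handy when a qualifier or value may be absent.
    byte[] missing = null;
    System.out.println(Bytes.len(missing));    // 0 instead of a NullPointerException
  }
}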
[30/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/checkstyle.rss -- diff --git a/checkstyle.rss b/checkstyle.rss index 3d9e14a..037ca70 100644 --- a/checkstyle.rss +++ b/checkstyle.rss @@ -26,7 +26,7 @@ under the License. 2007 - 2018 The Apache Software Foundation File: 3600, - Errors: 15913, + Errors: 15910, Warnings: 0, Infos: 0 @@ -5277,7 +5277,7 @@ under the License. 0 - 4 + 3 @@ -13663,7 +13663,7 @@ under the License. 0 - 24 + 23 @@ -47053,7 +47053,7 @@ under the License. 0 - 5 + 4 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/coc.html -- diff --git a/coc.html b/coc.html index d922ab3..00d2ebb 100644 --- a/coc.html +++ b/coc.html @@ -7,7 +7,7 @@ - + Apache HBase Code of Conduct Policy @@ -63,7 +63,7 @@ License -http://www.apache.org/dyn/closer.cgi/hbase/; title="Downloads">Downloads +http://www.apache.org/dyn/closer.lua/hbase/; title="Downloads">Downloads https://issues.apache.org/jira/browse/HBASE?report=com.atlassian.jira.plugin.system.project:changelog-panel#selectedTab=com.atlassian.jira.plugin.system.project%3Achangelog-panel; title="Release Notes">Release Notes @@ -365,7 +365,7 @@ email to mailto:priv...@hbase.apache.org;>the priv https://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2018-04-04 + Last Published: 2018-04-05 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/dependencies.html -- diff --git a/dependencies.html b/dependencies.html index dea81bf..4b0e9a5 100644 --- a/dependencies.html +++ b/dependencies.html @@ -7,7 +7,7 @@ - + Apache HBase Project Dependencies @@ -61,7 +61,7 @@ License -http://www.apache.org/dyn/closer.cgi/hbase/; title="Downloads">Downloads +http://www.apache.org/dyn/closer.lua/hbase/; title="Downloads">Downloads https://issues.apache.org/jira/browse/HBASE?report=com.atlassian.jira.plugin.system.project:changelog-panel#selectedTab=com.atlassian.jira.plugin.system.project%3Achangelog-panel; title="Release Notes">Release Notes @@ -430,7 +430,7 @@ https://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2018-04-04 + Last Published: 2018-04-05 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/dependency-convergence.html -- diff --git a/dependency-convergence.html b/dependency-convergence.html index 0c9c22d..1176b8e 100644 --- a/dependency-convergence.html +++ b/dependency-convergence.html @@ -7,7 +7,7 @@ - + Apache HBase Reactor Dependency Convergence @@ -61,7 +61,7 @@ License -http://www.apache.org/dyn/closer.cgi/hbase/; title="Downloads">Downloads +http://www.apache.org/dyn/closer.lua/hbase/; title="Downloads">Downloads https://issues.apache.org/jira/browse/HBASE?report=com.atlassian.jira.plugin.system.project:changelog-panel#selectedTab=com.atlassian.jira.plugin.system.project%3Achangelog-panel; title="Release Notes">Release Notes @@ -1095,7 +1095,7 @@ https://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2018-04-04 + Last Published: 2018-04-05 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/dependency-info.html
[01/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
Repository: hbase-site Updated Branches: refs/heads/asf-site a0fbd6a82 -> 6c67ddd76 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestSyncTable.html -- diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestSyncTable.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestSyncTable.html index f1774c3..f608e7e 100644 --- a/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestSyncTable.html +++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestSyncTable.html @@ -82,271 +82,505 @@ 074 075 @AfterClass 076 public static void afterClass() throws Exception { -077TEST_UTIL.shutdownMiniCluster(); -078 } -079 -080 private static byte[][] generateSplits(int numRows, int numRegions) { -081byte[][] splitRows = new byte[numRegions-1][]; -082for (int i = 1; i numRegions; i++) { -083 splitRows[i-1] = Bytes.toBytes(numRows * i / numRegions); -084} -085return splitRows; -086 } -087 -088 @Test -089 public void testSyncTable() throws Exception { -090final TableName sourceTableName = TableName.valueOf(name.getMethodName() + "_source"); -091final TableName targetTableName = TableName.valueOf(name.getMethodName() + "_target"); -092Path testDir = TEST_UTIL.getDataTestDirOnTestFS("testSyncTable"); -093 -094writeTestData(sourceTableName, targetTableName); -095hashSourceTable(sourceTableName, testDir); -096Counters syncCounters = syncTables(sourceTableName, targetTableName, testDir); -097assertEqualTables(90, sourceTableName, targetTableName); -098 -099assertEquals(60, syncCounters.findCounter(Counter.ROWSWITHDIFFS).getValue()); -100assertEquals(10, syncCounters.findCounter(Counter.SOURCEMISSINGROWS).getValue()); -101assertEquals(10, syncCounters.findCounter(Counter.TARGETMISSINGROWS).getValue()); -102assertEquals(50, syncCounters.findCounter(Counter.SOURCEMISSINGCELLS).getValue()); -103assertEquals(50, syncCounters.findCounter(Counter.TARGETMISSINGCELLS).getValue()); -104assertEquals(20, syncCounters.findCounter(Counter.DIFFERENTCELLVALUES).getValue()); -105 -106 TEST_UTIL.deleteTable(sourceTableName); -107 TEST_UTIL.deleteTable(targetTableName); -108 TEST_UTIL.cleanupDataTestDirOnTestFS(); +077 TEST_UTIL.cleanupDataTestDirOnTestFS(); +078TEST_UTIL.shutdownMiniCluster(); +079 } +080 +081 private static byte[][] generateSplits(int numRows, int numRegions) { +082byte[][] splitRows = new byte[numRegions-1][]; +083for (int i = 1; i numRegions; i++) { +084 splitRows[i-1] = Bytes.toBytes(numRows * i / numRegions); +085} +086return splitRows; +087 } +088 +089 @Test +090 public void testSyncTable() throws Exception { +091final TableName sourceTableName = TableName.valueOf(name.getMethodName() + "_source"); +092final TableName targetTableName = TableName.valueOf(name.getMethodName() + "_target"); +093Path testDir = TEST_UTIL.getDataTestDirOnTestFS("testSyncTable"); +094 +095writeTestData(sourceTableName, targetTableName); +096hashSourceTable(sourceTableName, testDir); +097Counters syncCounters = syncTables(sourceTableName, targetTableName, testDir); +098assertEqualTables(90, sourceTableName, targetTableName); +099 +100assertEquals(60, syncCounters.findCounter(Counter.ROWSWITHDIFFS).getValue()); +101assertEquals(10, syncCounters.findCounter(Counter.SOURCEMISSINGROWS).getValue()); +102assertEquals(10, syncCounters.findCounter(Counter.TARGETMISSINGROWS).getValue()); +103assertEquals(50, syncCounters.findCounter(Counter.SOURCEMISSINGCELLS).getValue()); +104assertEquals(50, 
syncCounters.findCounter(Counter.TARGETMISSINGCELLS).getValue()); +105assertEquals(20, syncCounters.findCounter(Counter.DIFFERENTCELLVALUES).getValue()); +106 +107 TEST_UTIL.deleteTable(sourceTableName); +108 TEST_UTIL.deleteTable(targetTableName); 109 } 110 -111 private void assertEqualTables(int expectedRows, TableName sourceTableName, -112 TableName targetTableName) throws Exception { -113Table sourceTable = TEST_UTIL.getConnection().getTable(sourceTableName); -114Table targetTable = TEST_UTIL.getConnection().getTable(targetTableName); -115 -116ResultScanner sourceScanner = sourceTable.getScanner(new Scan()); -117ResultScanner targetScanner = targetTable.getScanner(new Scan()); -118 -119for (int i = 0; i expectedRows; i++) { -120 Result sourceRow = sourceScanner.next(); -121 Result targetRow = targetScanner.next(); +111 @Test +112 public void testSyncTableDoDeletesFalse() throws Exception { +113final TableName sourceTableName = TableName.valueOf(name.getMethodName() + "_source"); +114final TableName targetTableName = TableName.valueOf(name.getMethodName() + "_target"); +115
[07/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html index 8c0d57c..e606e82 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html @@ -46,2582 +46,2583 @@ 038import java.util.Iterator; 039import java.util.List; 040 -041import org.apache.hadoop.hbase.Cell; -042import org.apache.hadoop.hbase.CellComparator; -043import org.apache.hadoop.hbase.KeyValue; -044import org.apache.hadoop.io.RawComparator; -045import org.apache.hadoop.io.WritableComparator; -046import org.apache.hadoop.io.WritableUtils; -047import org.apache.yetus.audience.InterfaceAudience; -048import org.slf4j.Logger; -049import org.slf4j.LoggerFactory; -050 -051import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; -052 -053import com.google.protobuf.ByteString; -054 -055import sun.misc.Unsafe; -056 -057/** -058 * Utility class that handles byte arrays, conversions to/from other types, -059 * comparisons, hash code generation, manufacturing keys for HashMaps or -060 * HashSets, and can be used as key in maps or trees. -061 */ -062@SuppressWarnings("restriction") -063@InterfaceAudience.Public -064@edu.umd.cs.findbugs.annotations.SuppressWarnings( -065 value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", -066justification="It has been like this forever") -067public class Bytes implements ComparableBytes { -068 -069 // Using the charset canonical name for String/byte[] conversions is much -070 // more efficient due to use of cached encoders/decoders. -071 private static final String UTF8_CSN = StandardCharsets.UTF_8.name(); -072 -073 //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed -074 private static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; -075 -076 private static final Logger LOG = LoggerFactory.getLogger(Bytes.class); -077 -078 /** -079 * Size of boolean in bytes -080 */ -081 public static final int SIZEOF_BOOLEAN = Byte.SIZE / Byte.SIZE; -082 -083 /** -084 * Size of byte in bytes -085 */ -086 public static final int SIZEOF_BYTE = SIZEOF_BOOLEAN; -087 -088 /** -089 * Size of char in bytes -090 */ -091 public static final int SIZEOF_CHAR = Character.SIZE / Byte.SIZE; -092 -093 /** -094 * Size of double in bytes -095 */ -096 public static final int SIZEOF_DOUBLE = Double.SIZE / Byte.SIZE; -097 -098 /** -099 * Size of float in bytes -100 */ -101 public static final int SIZEOF_FLOAT = Float.SIZE / Byte.SIZE; -102 -103 /** -104 * Size of int in bytes -105 */ -106 public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE; -107 -108 /** -109 * Size of long in bytes -110 */ -111 public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE; -112 -113 /** -114 * Size of short in bytes -115 */ -116 public static final int SIZEOF_SHORT = Short.SIZE / Byte.SIZE; -117 -118 /** -119 * Mask to apply to a long to reveal the lower int only. Use like this: -120 * int i = (int)(0xL ^ some_long_value); -121 */ -122 public static final long MASK_FOR_LOWER_INT_IN_LONG = 0xL; -123 -124 /** -125 * Estimate of size cost to pay beyond payload in jvm for instance of byte []. 
-126 * Estimate based on study of jhat and jprofiler numbers. -127 */ -128 // JHat says BU is 56 bytes. -129 // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?) -130 public static final int ESTIMATED_HEAP_TAX = 16; -131 -132 private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned(); -133 -134 /** -135 * Returns length of the byte array, returning 0 if the array is null. -136 * Useful for calculating sizes. -137 * @param b byte array, which can be null -138 * @return 0 if b is null, otherwise returns length -139 */ -140 final public static int len(byte[] b) { -141return b == null ? 0 : b.length; -142 } -143 -144 private byte[] bytes; -145 private int offset; -146 private int length; -147 -148 /** -149 * Create a zero-size sequence. -150 */ -151 public Bytes() { -152super(); -153 } -154 -155 /** -156 * Create a Bytes using the byte array as the initial value. -157 * @param bytes This array becomes the backing storage for the object. -158 */ -159 public Bytes(byte[] bytes) { -160this(bytes, 0, bytes.length); -161 } -162 -163 /** -164 * Set the new Bytes to the contents of the passed -165 * <code>ibw</code>. -166 * @param ibw the value
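The Bytes hunk above documents the SIZEOF_* constants (each derived as Type.SIZE / Byte.SIZE, so SIZEOF_INT is 4 and SIZEOF_LONG is 8), the rough ESTIMATED_HEAP_TAX charged per byte[] instance, and the null-safe len() helper. As a minimal sketch of how calling code can combine these helpers, the record layout below (an int id, a long timestamp, a variable-length payload) is purely illustrative and not part of this patch:

import org.apache.hadoop.hbase.util.Bytes;

public class RowSizeEstimate {
  // Hypothetical record: an int id, a long timestamp and a variable-length payload.
  static int estimateOnHeap(byte[] payload) {
    int fixed = Bytes.SIZEOF_INT + Bytes.SIZEOF_LONG; // primitive fields
    int varLen = Bytes.len(payload);                  // 0 when payload is null
    return fixed + varLen + Bytes.ESTIMATED_HEAP_TAX; // rough JVM overhead for the byte[]
  }

  public static void main(String[] args) {
    System.out.println(estimateOnHeap(Bytes.toBytes("example-value")));
    System.out.println(estimateOnHeap(null)); // a null array simply counts as length 0
  }
}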
[17/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html index 5b7c419..7137829 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html @@ -78,93 +78,93 @@ 070import java.util.concurrent.locks.ReadWriteLock; 071import java.util.concurrent.locks.ReentrantReadWriteLock; 072import java.util.function.Function; -073import org.apache.hadoop.conf.Configuration; -074import org.apache.hadoop.fs.FileStatus; -075import org.apache.hadoop.fs.FileSystem; -076import org.apache.hadoop.fs.LocatedFileStatus; -077import org.apache.hadoop.fs.Path; -078import org.apache.hadoop.hbase.Cell; -079import org.apache.hadoop.hbase.CellBuilderType; -080import org.apache.hadoop.hbase.CellComparator; -081import org.apache.hadoop.hbase.CellComparatorImpl; -082import org.apache.hadoop.hbase.CellScanner; -083import org.apache.hadoop.hbase.CellUtil; -084import org.apache.hadoop.hbase.CompareOperator; -085import org.apache.hadoop.hbase.CompoundConfiguration; -086import org.apache.hadoop.hbase.DoNotRetryIOException; -087import org.apache.hadoop.hbase.DroppedSnapshotException; -088import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; -089import org.apache.hadoop.hbase.HConstants; -090import org.apache.hadoop.hbase.HConstants.OperationStatusCode; -091import org.apache.hadoop.hbase.HDFSBlocksDistribution; -092import org.apache.hadoop.hbase.KeyValue; -093import org.apache.hadoop.hbase.KeyValueUtil; -094import org.apache.hadoop.hbase.NamespaceDescriptor; -095import org.apache.hadoop.hbase.NotServingRegionException; -096import org.apache.hadoop.hbase.PrivateCellUtil; -097import org.apache.hadoop.hbase.RegionTooBusyException; -098import org.apache.hadoop.hbase.TableName; -099import org.apache.hadoop.hbase.Tag; -100import org.apache.hadoop.hbase.TagUtil; -101import org.apache.hadoop.hbase.UnknownScannerException; -102import org.apache.hadoop.hbase.client.Append; -103import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -104import org.apache.hadoop.hbase.client.CompactionState; -105import org.apache.hadoop.hbase.client.Delete; -106import org.apache.hadoop.hbase.client.Durability; -107import org.apache.hadoop.hbase.client.Get; -108import org.apache.hadoop.hbase.client.Increment; -109import org.apache.hadoop.hbase.client.IsolationLevel; -110import org.apache.hadoop.hbase.client.Mutation; -111import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor; -112import org.apache.hadoop.hbase.client.Put; -113import org.apache.hadoop.hbase.client.RegionInfo; -114import org.apache.hadoop.hbase.client.RegionInfoBuilder; -115import org.apache.hadoop.hbase.client.RegionReplicaUtil; -116import org.apache.hadoop.hbase.client.Result; -117import org.apache.hadoop.hbase.client.RowMutations; -118import org.apache.hadoop.hbase.client.Scan; -119import org.apache.hadoop.hbase.client.TableDescriptor; -120import org.apache.hadoop.hbase.client.TableDescriptorBuilder; -121import org.apache.hadoop.hbase.conf.ConfigurationManager; -122import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver; -123import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType; -124import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare; -125import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException; -126import org.apache.hadoop.hbase.exceptions.TimeoutIOException; -127import org.apache.hadoop.hbase.exceptions.UnknownProtocolException; -128import org.apache.hadoop.hbase.filter.ByteArrayComparable; -129import org.apache.hadoop.hbase.filter.FilterWrapper; -130import org.apache.hadoop.hbase.filter.IncompatibleFilterException; -131import org.apache.hadoop.hbase.io.HFileLink; -132import org.apache.hadoop.hbase.io.HeapSize; -133import org.apache.hadoop.hbase.io.TimeRange; -134import org.apache.hadoop.hbase.io.hfile.HFile; -135import org.apache.hadoop.hbase.ipc.CallerDisconnectedException; -136import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; -137import org.apache.hadoop.hbase.ipc.RpcCall; -138import org.apache.hadoop.hbase.ipc.RpcServer; -139import org.apache.hadoop.hbase.monitoring.MonitoredTask; -140import org.apache.hadoop.hbase.monitoring.TaskMonitor; -141import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager; -142import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry; -143import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope; -144import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState; -145import
[11/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html b/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html index 4a4853f..c90e6fa 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html @@ -26,18 +26,18 @@ 018package org.apache.hadoop.hbase.replication; 019 020import static java.util.stream.Collectors.toList; -021import static org.apache.hadoop.hbase.util.CollectionUtils.nullToEmpty; -022 -023import java.util.ArrayList; -024import java.util.Collections; -025import java.util.HashSet; -026import java.util.List; -027import java.util.Map; -028import java.util.Map.Entry; -029import java.util.Set; -030import java.util.SortedSet; -031import java.util.TreeSet; -032import java.util.stream.Collectors; +021 +022import java.util.ArrayList; +023import java.util.Collections; +024import java.util.HashSet; +025import java.util.List; +026import java.util.Map; +027import java.util.Map.Entry; +028import java.util.Set; +029import java.util.SortedSet; +030import java.util.TreeSet; +031import java.util.stream.Collectors; +032import org.apache.commons.collections.CollectionUtils; 033import org.apache.hadoop.conf.Configuration; 034import org.apache.hadoop.fs.Path; 035import org.apache.hadoop.hbase.HConstants; @@ -45,636 +45,644 @@ 037import org.apache.hadoop.hbase.client.RegionInfo; 038import org.apache.hadoop.hbase.exceptions.DeserializationException; 039import org.apache.hadoop.hbase.util.Bytes; -040import org.apache.hadoop.hbase.util.CollectionUtils; -041import org.apache.hadoop.hbase.util.Pair; -042import org.apache.hadoop.hbase.zookeeper.ZKUtil; -043import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp; -044import org.apache.hadoop.hbase.zookeeper.ZKWatcher; -045import org.apache.hadoop.hbase.zookeeper.ZNodePaths; -046import org.apache.yetus.audience.InterfaceAudience; -047import org.apache.zookeeper.KeeperException; -048import org.apache.zookeeper.KeeperException.BadVersionException; -049import org.apache.zookeeper.KeeperException.NoNodeException; -050import org.apache.zookeeper.KeeperException.NodeExistsException; -051import org.apache.zookeeper.KeeperException.NotEmptyException; -052import org.apache.zookeeper.data.Stat; -053import org.slf4j.Logger; -054import org.slf4j.LoggerFactory; -055 -056import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; -057 -058/** -059 * ZK based replication queue storage. -060 * <p> -061 * The base znode for each regionserver is the regionserver name. For example: -062 * -063 * <pre> -064 * /hbase/replication/rs/hostname.example.org,6020,1234 -065 * </pre> -066 * -067 * Within this znode, the region server maintains a set of WAL replication queues. These queues are -068 * represented by child znodes named using their given queue id. For example: -069 * -070 * <pre> -071 * /hbase/replication/rs/hostname.example.org,6020,1234/1 -072 * /hbase/replication/rs/hostname.example.org,6020,1234/2 -073 * </pre> -074 * -075 * Each queue has one child znode for every WAL that still needs to be replicated. The value of -076 * these WAL child znodes is the latest position that has been replicated. This position is updated -077 * every time a WAL entry is replicated. For example: -078 * -079 * <pre> -080 * /hbase/replication/rs/hostname.example.org,6020,1234/1/23522342.23422 [VALUE: 254] -081 * </pre> -082 */ -083@InterfaceAudience.Private -084class ZKReplicationQueueStorage extends ZKReplicationStorageBase -085implements ReplicationQueueStorage { -086 -087 private static final Logger LOG = LoggerFactory.getLogger(ZKReplicationQueueStorage.class); -088 -089 public static final String ZOOKEEPER_ZNODE_REPLICATION_HFILE_REFS_KEY = -090 "zookeeper.znode.replication.hfile.refs"; -091 public static final String ZOOKEEPER_ZNODE_REPLICATION_HFILE_REFS_DEFAULT = "hfile-refs"; -092 -093 public static final String ZOOKEEPER_ZNODE_REPLICATION_REGIONS_KEY = -094 "zookeeper.znode.replication.regions"; -095 public static final String ZOOKEEPER_ZNODE_REPLICATION_REGIONS_DEFAULT = "regions"; -096 -097 /** -098 * The name of the znode that contains all replication queues -099 */ -100 private final String queuesZNode; -101 -102 /** -103 * The name of the znode that contains queues of hfile references to be replicated -104 */ -105 private final String hfileRefsZNode; -106 -107 @VisibleForTesting -108 final String regionsZNode; -109 -110 public ZKReplicationQueueStorage(ZKWatcher zookeeper, Configuration conf) { -111super(zookeeper, conf); -112 -113String queuesZNodeName =
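The removed javadoc above lays out the replication queue znodes: one base znode per regionserver under queuesZNode, one child per queue id, and one child per WAL whose value is the last replicated position. A small sketch of how such paths compose, assuming ZNodePaths.joinZNode; the base path, server name, queue id and WAL name are example values echoing the javadoc, not data taken from this patch:

import org.apache.hadoop.hbase.zookeeper.ZNodePaths;

public class ReplicationQueueZNodeLayout {
  public static void main(String[] args) {
    // Illustrative values; real znodes come from configuration and the live cluster.
    String queuesZNode = "/hbase/replication/rs";
    String serverName = "hostname.example.org,6020,1234";
    String queueId = "1";
    String walName = "23522342.23422";

    String rsZNode = ZNodePaths.joinZNode(queuesZNode, serverName);
    String queueZNode = ZNodePaths.joinZNode(rsZNode, queueId);
    String walZNode = ZNodePaths.joinZNode(queueZNode, walName);

    // /hbase/replication/rs/hostname.example.org,6020,1234/1/23522342.23422
    // The data stored at walZNode is the last replicated position in that WAL.
    System.out.println(walZNode);
  }
}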
[14/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html index ebbde54..7d1dba6 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html @@ -50,90 +50,90 @@ 042import java.util.concurrent.atomic.AtomicBoolean; 043import java.util.concurrent.atomic.AtomicLong; 044import java.util.concurrent.atomic.LongAdder; -045import org.apache.commons.lang3.mutable.MutableObject; -046import org.apache.hadoop.conf.Configuration; -047import org.apache.hadoop.fs.Path; -048import org.apache.hadoop.hbase.ByteBufferExtendedCell; -049import org.apache.hadoop.hbase.CacheEvictionStats; -050import org.apache.hadoop.hbase.CacheEvictionStatsBuilder; -051import org.apache.hadoop.hbase.Cell; -052import org.apache.hadoop.hbase.CellScannable; -053import org.apache.hadoop.hbase.CellScanner; -054import org.apache.hadoop.hbase.CellUtil; -055import org.apache.hadoop.hbase.CompareOperator; -056import org.apache.hadoop.hbase.DoNotRetryIOException; -057import org.apache.hadoop.hbase.DroppedSnapshotException; -058import org.apache.hadoop.hbase.HBaseIOException; -059import org.apache.hadoop.hbase.HConstants; -060import org.apache.hadoop.hbase.MultiActionResultTooLarge; -061import org.apache.hadoop.hbase.NotServingRegionException; -062import org.apache.hadoop.hbase.PrivateCellUtil; -063import org.apache.hadoop.hbase.RegionTooBusyException; -064import org.apache.hadoop.hbase.Server; -065import org.apache.hadoop.hbase.ServerName; -066import org.apache.hadoop.hbase.TableName; -067import org.apache.hadoop.hbase.UnknownScannerException; -068import org.apache.hadoop.hbase.client.Append; -069import org.apache.hadoop.hbase.client.ConnectionUtils; -070import org.apache.hadoop.hbase.client.Delete; -071import org.apache.hadoop.hbase.client.Durability; -072import org.apache.hadoop.hbase.client.Get; -073import org.apache.hadoop.hbase.client.Increment; -074import org.apache.hadoop.hbase.client.Mutation; -075import org.apache.hadoop.hbase.client.Put; -076import org.apache.hadoop.hbase.client.RegionInfo; -077import org.apache.hadoop.hbase.client.RegionReplicaUtil; -078import org.apache.hadoop.hbase.client.Result; -079import org.apache.hadoop.hbase.client.Row; -080import org.apache.hadoop.hbase.client.RowMutations; -081import org.apache.hadoop.hbase.client.Scan; -082import org.apache.hadoop.hbase.client.TableDescriptor; -083import org.apache.hadoop.hbase.client.VersionInfoUtil; -084import org.apache.hadoop.hbase.conf.ConfigurationObserver; -085import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException; -086import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException; -087import org.apache.hadoop.hbase.exceptions.ScannerResetException; -088import org.apache.hadoop.hbase.exceptions.UnknownProtocolException; -089import org.apache.hadoop.hbase.filter.ByteArrayComparable; -090import org.apache.hadoop.hbase.io.TimeRange; -091import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler; -092import org.apache.hadoop.hbase.ipc.HBaseRpcController; -093import org.apache.hadoop.hbase.ipc.PriorityFunction; -094import org.apache.hadoop.hbase.ipc.QosPriority; -095import 
org.apache.hadoop.hbase.ipc.RpcCallContext; -096import org.apache.hadoop.hbase.ipc.RpcCallback; -097import org.apache.hadoop.hbase.ipc.RpcServer; -098import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface; -099import org.apache.hadoop.hbase.ipc.RpcServerFactory; -100import org.apache.hadoop.hbase.ipc.RpcServerInterface; -101import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; -102import org.apache.hadoop.hbase.ipc.ServerRpcController; -103import org.apache.hadoop.hbase.log.HBaseMarkers; -104import org.apache.hadoop.hbase.master.MasterRpcServices; -105import org.apache.hadoop.hbase.net.Address; -106import org.apache.hadoop.hbase.procedure2.RSProcedureCallable; -107import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement; -108import org.apache.hadoop.hbase.quotas.OperationQuota; -109import org.apache.hadoop.hbase.quotas.QuotaUtil; -110import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager; -111import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager; -112import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot; -113import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement; -114import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl; -115import org.apache.hadoop.hbase.regionserver.Leases.Lease; -116import
[12/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html index e1fe473..a9224f0 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html @@ -33,14 +33,14 @@ 025import java.util.Set; 026import java.util.TreeSet; 027 -028import org.apache.hadoop.hbase.Cell; -029import org.apache.hadoop.hbase.CellComparator; -030import org.apache.hadoop.hbase.CellUtil; -031import org.apache.hadoop.hbase.PrivateCellUtil; -032import org.apache.hadoop.hbase.client.RegionInfo; -033import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl; -034import org.apache.hadoop.hbase.util.Bytes; -035import org.apache.hadoop.hbase.util.CollectionUtils; +028import org.apache.commons.collections.CollectionUtils; +029import org.apache.hadoop.hbase.Cell; +030import org.apache.hadoop.hbase.CellComparator; +031import org.apache.hadoop.hbase.CellUtil; +032import org.apache.hadoop.hbase.PrivateCellUtil; +033import org.apache.hadoop.hbase.client.RegionInfo; +034import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl; +035import org.apache.hadoop.hbase.util.Bytes; 036import org.apache.hadoop.hbase.wal.WAL.Entry; 037import org.apache.hadoop.hbase.wal.WALEdit; 038import org.apache.hadoop.hbase.wal.WALKeyImpl; http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationPeerStorage.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationPeerStorage.html b/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationPeerStorage.html index 724dad5..0245396 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationPeerStorage.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationPeerStorage.html @@ -26,11 +26,11 @@ 018package org.apache.hadoop.hbase.replication; 019 020import java.util.Arrays; -021import java.util.List; -022import org.apache.hadoop.conf.Configuration; -023import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil; -024import org.apache.hadoop.hbase.exceptions.DeserializationException; -025import org.apache.hadoop.hbase.util.CollectionUtils; +021import java.util.Collections; +022import java.util.List; +023import org.apache.hadoop.conf.Configuration; +024import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil; +025import org.apache.hadoop.hbase.exceptions.DeserializationException; 026import org.apache.hadoop.hbase.zookeeper.ZKUtil; 027import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp; 028import org.apache.hadoop.hbase.zookeeper.ZKWatcher; @@ -139,42 +139,43 @@ 131 @Override 132 public ListString listPeerIds() throws ReplicationException { 133try { -134 return CollectionUtils.nullToEmpty(ZKUtil.listChildrenNoWatch(zookeeper, peersZNode)); -135} catch (KeeperException e) { -136 throw new ReplicationException("Cannot get the list of peers", e); -137} -138 } -139 -140 @Override -141 public boolean isPeerEnabled(String peerId) throws ReplicationException { -142try { -143 return Arrays.equals(ENABLED_ZNODE_BYTES, -144ZKUtil.getData(zookeeper, getPeerStateNode(peerId))); -145} catch (KeeperException | InterruptedException e) { 
-146 throw new ReplicationException("Unable to get status of the peer with id=" + peerId, e); -147} -148 } -149 -150 @Override -151 public ReplicationPeerConfig getPeerConfig(String peerId) throws ReplicationException { -152byte[] data; -153try { -154 data = ZKUtil.getData(zookeeper, getPeerNode(peerId)); -155} catch (KeeperException | InterruptedException e) { -156 throw new ReplicationException("Error getting configuration for peer with id=" + peerId, e); -157} -158if (data == null || data.length == 0) { -159 throw new ReplicationException( -160 "Replication peer config data shouldn't be empty, peerId=" + peerId); -161} -162try { -163 return ReplicationPeerConfigUtil.parsePeerFrom(data); -164} catch (DeserializationException e) { -165 throw new ReplicationException( -166 "Failed to parse replication peer config for peer with id=" + peerId, e); -167} -168 } -169} +134 List<String> children = ZKUtil.listChildrenNoWatch(zookeeper, peersZNode); +135 return children != null ? children : Collections.emptyList(); +136} catch (KeeperException e) { +137 throw new
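In the ZKReplicationPeerStorage hunk just above, the removed hbase-internal CollectionUtils.nullToEmpty call in listPeerIds is replaced by an explicit null guard that falls back to Collections.emptyList(). A self-contained sketch of the same pattern; the class and variable names are illustrative only:

import java.util.Collections;
import java.util.List;

public class NullToEmptyExample {
  // Equivalent of the removed nullToEmpty(list) helper: callers iterate an
  // immutable empty list instead of having to check for null themselves.
  static <T> List<T> nullToEmpty(List<T> list) {
    return list != null ? list : Collections.emptyList();
  }

  public static void main(String[] args) {
    List<String> children = null; // e.g. the peers znode has no children yet
    for (String peerId : nullToEmpty(children)) {
      System.out.println(peerId); // loop body is simply skipped
    }
  }
}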
[05/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html index 8c0d57c..e606e82 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html @@ -46,2582 +46,2583 @@ 038import java.util.Iterator; 039import java.util.List; 040 -041import org.apache.hadoop.hbase.Cell; -042import org.apache.hadoop.hbase.CellComparator; -043import org.apache.hadoop.hbase.KeyValue; -044import org.apache.hadoop.io.RawComparator; -045import org.apache.hadoop.io.WritableComparator; -046import org.apache.hadoop.io.WritableUtils; -047import org.apache.yetus.audience.InterfaceAudience; -048import org.slf4j.Logger; -049import org.slf4j.LoggerFactory; -050 -051import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; -052 -053import com.google.protobuf.ByteString; -054 -055import sun.misc.Unsafe; -056 -057/** -058 * Utility class that handles byte arrays, conversions to/from other types, -059 * comparisons, hash code generation, manufacturing keys for HashMaps or -060 * HashSets, and can be used as key in maps or trees. -061 */ -062@SuppressWarnings("restriction") -063@InterfaceAudience.Public -064@edu.umd.cs.findbugs.annotations.SuppressWarnings( -065 value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", -066justification="It has been like this forever") -067public class Bytes implements ComparableBytes { -068 -069 // Using the charset canonical name for String/byte[] conversions is much -070 // more efficient due to use of cached encoders/decoders. -071 private static final String UTF8_CSN = StandardCharsets.UTF_8.name(); -072 -073 //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed -074 private static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; -075 -076 private static final Logger LOG = LoggerFactory.getLogger(Bytes.class); -077 -078 /** -079 * Size of boolean in bytes -080 */ -081 public static final int SIZEOF_BOOLEAN = Byte.SIZE / Byte.SIZE; -082 -083 /** -084 * Size of byte in bytes -085 */ -086 public static final int SIZEOF_BYTE = SIZEOF_BOOLEAN; -087 -088 /** -089 * Size of char in bytes -090 */ -091 public static final int SIZEOF_CHAR = Character.SIZE / Byte.SIZE; -092 -093 /** -094 * Size of double in bytes -095 */ -096 public static final int SIZEOF_DOUBLE = Double.SIZE / Byte.SIZE; -097 -098 /** -099 * Size of float in bytes -100 */ -101 public static final int SIZEOF_FLOAT = Float.SIZE / Byte.SIZE; -102 -103 /** -104 * Size of int in bytes -105 */ -106 public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE; -107 -108 /** -109 * Size of long in bytes -110 */ -111 public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE; -112 -113 /** -114 * Size of short in bytes -115 */ -116 public static final int SIZEOF_SHORT = Short.SIZE / Byte.SIZE; -117 -118 /** -119 * Mask to apply to a long to reveal the lower int only. Use like this: -120 * int i = (int)(0xL ^ some_long_value); -121 */ -122 public static final long MASK_FOR_LOWER_INT_IN_LONG = 0xL; -123 -124 /** -125 * Estimate of size cost to pay beyond payload in jvm for instance of byte []. -126 * Estimate based on study of jhat and jprofiler numbers. -127 */ -128 // JHat says BU is 56 bytes. 
-129 // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?) -130 public static final int ESTIMATED_HEAP_TAX = 16; -131 -132 private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned(); -133 -134 /** -135 * Returns length of the byte array, returning 0 if the array is null. -136 * Useful for calculating sizes. -137 * @param b byte array, which can be null -138 * @return 0 if b is null, otherwise returns length -139 */ -140 final public static int len(byte[] b) { -141return b == null ? 0 : b.length; -142 } -143 -144 private byte[] bytes; -145 private int offset; -146 private int length; -147 -148 /** -149 * Create a zero-size sequence. -150 */ -151 public Bytes() { -152super(); -153 } -154 -155 /** -156 * Create a Bytes using the byte array as the initial value. -157 * @param bytes This array becomes the backing storage for the object. -158 */ -159 public Bytes(byte[] bytes) { -160this(bytes, 0, bytes.length); -161 } -162 -163 /** -164 * Set the new Bytes to the contents of the passed -165 * codeibw/code. -166 * @param ibw the value to set this Bytes to. -167 */ -168 public Bytes(final Bytes ibw) { -169this(ibw.get(), ibw.getOffset(),
[03/40] hbase-site git commit: Published site at e2b0490d18f7cc03aa59475a1b423597ddc481fb.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6c67ddd7/devapidocs/src-html/org/apache/hadoop/hbase/util/CollectionUtils.IOExceptionSupplier.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/CollectionUtils.IOExceptionSupplier.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/CollectionUtils.IOExceptionSupplier.html index ef475c0..aa0d77d 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/util/CollectionUtils.IOExceptionSupplier.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/CollectionUtils.IOExceptionSupplier.html @@ -27,144 +27,64 @@ 019package org.apache.hadoop.hbase.util; 020 021import java.io.IOException; -022import java.util.ArrayList; -023import java.util.Collection; -024import java.util.Collections; -025import java.util.List; -026import java.util.concurrent.ConcurrentMap; -027import java.util.function.Supplier; -028 -029import org.apache.yetus.audience.InterfaceAudience; -030 -031/** -032 * Utility methods for dealing with Collections, including treating null collections as empty. -033 */ -034@InterfaceAudience.Private -035public class CollectionUtils { -036 -037 private static final List<Object> EMPTY_LIST = Collections.unmodifiableList(new ArrayList<>(0)); -038 -039 -040 @SuppressWarnings("unchecked") -041 public static <T> Collection<T> nullSafe(Collection<T> in) { -042if (in == null) { -043 return (Collection<T>)EMPTY_LIST; -044} -045return in; -046 } -047 -048 /********** size **********/ -049 -050 public static <T> int nullSafeSize(Collection<T> collection) { -051if (collection == null) { -052 return 0; -053} -054return collection.size(); -055 } -056 -057 public static <A, B> boolean nullSafeSameSize(Collection<A> a, Collection<B> b) { -058return nullSafeSize(a) == nullSafeSize(b); -059 } -060 -061 /********** empty **********/ -062 -063 public static <T> boolean isEmpty(Collection<T> collection) { -064return collection == null || collection.isEmpty(); -065 } -066 -067 public static <T> boolean notEmpty(Collection<T> collection) { -068return !isEmpty(collection); -069 } -070 -071 /********** first/last **********/ -072 -073 public static <T> T getFirst(Collection<T> collection) { -074if (CollectionUtils.isEmpty(collection)) { -075 return null; -076} -077for (T t : collection) { -078 return t; -079} -080return null; -081 } -082 -083 /** -084 * @param list any list -085 * @return -1 if list is empty, otherwise the max index -086 */ -087 public static int getLastIndex(List<?> list){ -088if(isEmpty(list)){ -089 return -1; -090} -091return list.size() - 1; -092 } -093 -094 /** -095 * @param list -096 * @param index the index in question -097 * @return true if it is the last index or if list is empty and -1 is passed for the index param -098 */ -099 public static boolean isLastIndex(List<?> list, int index){ -100return index == getLastIndex(list); -101 } -102 -103 public static <T> T getLast(List<T> list) { -104if (isEmpty(list)) { -105 return null; -106} -107return list.get(list.size() - 1); -108 } -109 -110 public static <T> List<T> nullToEmpty(List<T> list) { -111return list != null ? list : Collections.emptyList(); -112 } -113 /** -114 * In HBASE-16648 we found that ConcurrentHashMap.get is much faster than computeIfAbsent if the -115 * value already exists. Notice that the implementation does not guarantee that the supplier will -116 * only be executed once. -117 */ -118 public static <K, V> V computeIfAbsent(ConcurrentMap<K, V> map, K key, Supplier<V> supplier) { -119return computeIfAbsent(map, key, supplier, () -> { -120}); -121 } -122 -123 /** -124 * A supplier that throws IOException when get. -125 */ -126 @FunctionalInterface -127 public interface IOExceptionSupplier<V> { -128V get() throws IOException; -129 } -130 -131 /** -132 * In HBASE-16648 we found that ConcurrentHashMap.get is much faster than computeIfAbsent if the -133 * value already exists. So here we copy the implementation of -134 * {@link ConcurrentMap#computeIfAbsent(Object, java.util.function.Function)}. It uses get and -135 * putIfAbsent to implement computeIfAbsent. And notice that the implementation does not guarantee -136 * that the supplier will only be executed once. -137 */ -138 public static <K, V> V computeIfAbsentEx(ConcurrentMap<K, V> map, K key, -139 IOExceptionSupplier<V> supplier) throws IOException { -140V v, newValue; -141return ((v = map.get(key)) == null && (newValue = supplier.get()) != null -142 && (v = map.putIfAbsent(key, newValue)) == null) ? newValue : v; -143 } -144 -145 public static <K, V> V computeIfAbsent(ConcurrentMap<K, V> map, K key, Supplier<V>
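The javadoc of the removed helpers above records the HBASE-16648 finding: ConcurrentHashMap.get is much cheaper than computeIfAbsent when the mapping is already present, so the helper calls get first and only falls back to putIfAbsent on a miss. A standalone sketch of that pattern against a plain ConcurrentHashMap; the class, cache and key names are illustrative:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Supplier;

public class GetFirstComputeIfAbsent {
  // Calling get() first avoids the bin lock that computeIfAbsent can take even on a hit.
  // As in the removed helper, the supplier may run more than once under a race.
  static <K, V> V computeIfAbsent(ConcurrentMap<K, V> map, K key, Supplier<V> supplier) {
    V v = map.get(key);
    if (v != null) {
      return v;
    }
    V newValue = supplier.get();
    V existing = map.putIfAbsent(key, newValue);
    return existing != null ? existing : newValue;
  }

  public static void main(String[] args) {
    ConcurrentMap<String, Integer> cache = new ConcurrentHashMap<>();
    System.out.println(computeIfAbsent(cache, "a", () -> 1)); // 1, supplier executed
    System.out.println(computeIfAbsent(cache, "a", () -> 2)); // 1, served from get()
  }
}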