This is an automated email from the ASF dual-hosted git repository.
cconnell pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-2 by this push:
new 889c3497318 HBASE-29729 add table descriptor hash metric (#7556)
889c3497318 is described below
commit 889c34973184f90c951c831f670e299f9924f035
Author: Alex Hughes <[email protected]>
AuthorDate: Tue Dec 16 16:54:23 2025 +0100
HBASE-29729 add table descriptor hash metric (#7556)
Co-authored-by: Alex Hughes <[email protected]>
Signed-off-by: Ray Mattingly <[email protected]>
---
.../hadoop/hbase/client/TableDescriptor.java | 25 ++++
.../hbase/client/TableDescriptorBuilder.java | 14 +-
.../hbase/regionserver/MetricsRegionSource.java | 2 +
.../hbase/regionserver/MetricsRegionWrapper.java | 8 ++
.../regionserver/MetricsRegionSourceImpl.java | 7 +
.../regionserver/TestMetricsRegionSourceImpl.java | 6 +-
.../regionserver/MetricsRegionWrapperImpl.java | 15 ++
.../regionserver/MetricsRegionWrapperStub.java | 5 +
...estMetricsRegionWrapperTableDescriptorHash.java | 155 +++++++++++++++++++++
.../regionserver/TestRegionServerMetrics.java | 19 ++-
.../TestTableDescriptorHashComputation.java | 127 +++++++++++++++++
11 files changed, 380 insertions(+), 3 deletions(-)
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
index c017387d67f..c438b4bdad9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.client;
+import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
@@ -26,10 +27,15 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import java.util.zip.CRC32;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
/**
* TableDescriptor contains the details about an HBase table such as the descriptors of all the
@@ -337,4 +343,23 @@ public interface TableDescriptor {
* {@link org.apache.hadoop.hbase.rsgroup.RSGroupInfo#DEFAULT_GROUP}.
*/
Optional<String> getRegionServerGroup();
+
+ /**
+ * Computes a CRC32 hash of the table descriptor's protobuf representation. This hash can be used
+ * to detect changes in the table descriptor configuration.
+ * @return A hex string representation of the CRC32 hash, or "UNKNOWN" if computation fails
+ */
+ default String getDescriptorHash() {
+ try {
+ HBaseProtos.TableSchema tableSchema = ProtobufUtil.toTableSchema(this);
+ ByteBuffer byteBuffer = ByteBuffer.wrap(tableSchema.toByteArray());
+ CRC32 crc32 = new CRC32();
+ crc32.update(byteBuffer);
+ return Long.toHexString(crc32.getValue());
+ } catch (Exception e) {
+ Logger log = LoggerFactory.getLogger(TableDescriptor.class);
+ log.error("Failed to compute table descriptor hash for table {}",
getTableName(), e);
+ return "UNKNOWN";
+ }
+ }
}
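A minimal usage sketch of the new default method (table and family names are illustrative; the behavior mirrors the unit tests added below). Any persisted change to the descriptor's protobuf form, a TTL here, changes the CRC32 value:

    // Sketch: two descriptors differing only in TTL yield different hashes.
    TableDescriptor plain = TableDescriptorBuilder.newBuilder(TableName.valueOf("t1"))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
    TableDescriptor withTtl = TableDescriptorBuilder.newBuilder(TableName.valueOf("t1"))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"))
        .setTimeToLive(86400).build())
      .build();
    assert !plain.getDescriptorHash().equals(withTtl.getDescriptorHash());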
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
index 3c8b7ad34b7..4a1611e9a74 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
@@ -47,7 +47,6 @@ import org.apache.hadoop.hbase.util.PrettyPrinter;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
@@ -633,6 +632,11 @@ public class TableDescriptorBuilder {
private final Map<byte[], ColumnFamilyDescriptor> families =
new TreeMap<>(Bytes.BYTES_RAWCOMPARATOR);
+ /**
+ * Cached hash of the table descriptor. Computed lazily on first access.
+ */
+ private volatile String descriptorHash;
+
/**
* Construct a table descriptor specifying a TableName object
* @param name Table name. TODO: make this private after removing the HTableDescriptor
@@ -1619,6 +1623,14 @@ public class TableDescriptorBuilder {
return Optional.empty();
}
}
+
+ @Override
+ public String getDescriptorHash() {
+ if (descriptorHash == null) {
+ descriptorHash = TableDescriptor.super.getDescriptorHash();
+ }
+ return descriptorHash;
+ }
}
private static Optional<CoprocessorDescriptor> toCoprocessorDescriptor(String spec) {
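A note on the caching above: the field is volatile and the null check is unsynchronized, so two threads may race to compute the hash, but the computation is deterministic for the immutable built descriptor, making the race benign. A small usage sketch (this mirrors testHashCaching below):

    TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("t"))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
    String h1 = td.getDescriptorHash(); // first call computes and caches
    String h2 = td.getDescriptorHash(); // later calls return the cached value
    assert h1.equals(h2);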
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java
index c3d955592d6..41267de1981 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java
@@ -56,6 +56,8 @@ public interface MetricsRegionSource extends Comparable<MetricsRegionSource> {
String ROW_READS_ONLY_ON_MEMSTORE_DESC = "Row reads happening completely out of memstore";
String MIXED_ROW_READS = "mixedRowReadsCount";
String MIXED_ROW_READS_ON_STORE_DESC = "Row reads happening out of files and memstore on store";
+ String TABLE_DESCRIPTOR_HASH = "tableDescriptorHash";
+ String TABLE_DESCRIPTOR_HASH_DESC = "The hash of the current table descriptor";
/**
* Close the region's metrics as this region is closing.
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java
index 4d8a028d89b..c3e37586e28 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java
@@ -161,4 +161,12 @@ public interface MetricsRegionWrapper {
/** Returns the number of row reads on memstore and file per store */
Map<String, Long> getMixedRowReadsCount();
+ /**
+ * Returns a hash of the table descriptor that this region was opened with. This hash uniquely
+ * identifies the table configuration (column families, compression, TTL, block size, etc.) and
+ * can be used to determine if a region needs to be reopened to pick up descriptor changes.
+ * @return hex-encoded hash of the serialized TableDescriptor
+ */
+ String getTableDescriptorHash();
+
}
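As the javadoc suggests, the intended use is drift detection. A hypothetical helper (not part of this patch; the class and method names are illustrative) comparing a region's published hash against the table's current descriptor:

    import org.apache.hadoop.hbase.client.TableDescriptor;

    // Hypothetical: a region is stale if the hash it was opened with no
    // longer matches the hash of the table's current descriptor.
    final class DescriptorDriftCheck {
      static boolean needsReopen(MetricsRegionWrapper wrapper, TableDescriptor current) {
        return !wrapper.getTableDescriptorHash().equals(current.getDescriptorHash());
      }
    }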
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
index 92ecaa58088..5e8f01a7579 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
@@ -284,6 +284,13 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource {
MetricsRegionSource.ROW_READS_ONLY_ON_MEMSTORE_DESC);
addCounter(mrb, this.regionWrapper.getMixedRowReadsCount(), MetricsRegionSource.MIXED_ROW_READS,
MetricsRegionSource.MIXED_ROW_READS_ON_STORE_DESC);
+ mrb.add(
+ Interns.tag(
+ regionNamePrefix + MetricsRegionSource.TABLE_DESCRIPTOR_HASH,
+ MetricsRegionSource.TABLE_DESCRIPTOR_HASH_DESC,
+ this.regionWrapper.getTableDescriptorHash()
+ )
+ );
}
}
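Because the hash is a string, it is published as a metrics tag (via Interns.tag) rather than a numeric gauge, so it surfaces among the per-region attributes of the RegionServer's Regions MBean. A hedged sketch of reading it over local JMX (the exact attribute name follows the regionNamePrefix convention and is an assumption here; exception handling omitted):

    import java.lang.management.ManagementFactory;
    import javax.management.MBeanAttributeInfo;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    ObjectName regions = new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=Regions");
    for (MBeanAttributeInfo attr : mbs.getMBeanInfo(regions).getAttributes()) {
      // Tags are exposed per region with the region name prefix, e.g. an
      // attribute ending in "..._metric_tableDescriptorHash" (assumed form).
      if (attr.getName().endsWith("tableDescriptorHash")) {
        System.out.println(attr.getName() + " = " + mbs.getAttribute(regions, attr.getName()));
      }
    }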
diff --git a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
index 2c8205085d1..cf2a4786f18 100644
--- a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
+++ b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
-
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
@@ -232,5 +231,10 @@ public class TestMetricsRegionSourceImpl {
map.put("info", 0L);
return map;
}
+
+ @Override
+ public String getTableDescriptorHash() {
+ return "testhash";
+ }
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
index bce961e8f27..eef58ed9f3e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
@@ -64,9 +64,11 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
private ScheduledFuture<?> regionMetricsUpdateTask;
private float currentRegionCacheRatio;
+ private final String tableDescriptorHash;
public MetricsRegionWrapperImpl(HRegion region) {
this.region = region;
+ this.tableDescriptorHash = determineTableDescriptorHash();
this.executor = CompatibilitySingletonFactory.getInstance(MetricsExecutor.class).getExecutor();
this.runnable = new HRegionMetricsWrapperRunnable();
this.regionMetricsUpdateTask =
@@ -352,6 +354,19 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
}
}
+ @Override
+ public String getTableDescriptorHash() {
+ return tableDescriptorHash;
+ }
+
+ private String determineTableDescriptorHash() {
+ TableDescriptor tableDesc = this.region.getTableDescriptor();
+ if (tableDesc == null) {
+ return UNKNOWN;
+ }
+ return tableDesc.getDescriptorHash();
+ }
+
@Override
public void close() throws IOException {
regionMetricsUpdateTask.cancel(true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java
index 0995b0faee0..ac77279882a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java
@@ -198,4 +198,9 @@ public class MetricsRegionWrapperStub implements MetricsRegionWrapper {
map.put("info", 0L);
return map;
}
+
+ @Override
+ public String getTableDescriptorHash() {
+ return "testhash123abc";
+ }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionWrapperTableDescriptorHash.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionWrapperTableDescriptorHash.java
new file mode 100644
index 00000000000..7c70b56ebdd
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionWrapperTableDescriptorHash.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ RegionServerTests.class, SmallTests.class })
+public class TestMetricsRegionWrapperTableDescriptorHash {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestMetricsRegionWrapperTableDescriptorHash.class);
+
+ private HBaseTestingUtility testUtil;
+ private Configuration conf;
+
+ @Before
+ public void setUp() throws Exception {
+ conf = HBaseConfiguration.create();
+ testUtil = new HBaseTestingUtility(conf);
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ if (testUtil != null) {
+ testUtil.cleanupTestDir();
+ }
+ }
+
+ @Test
+ public void testTableDescriptorHashGeneration() throws Exception {
+ TableName tableName = TableName.valueOf("testTable");
+ TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ RegionInfo regionInfo = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
+ .setEndKey(Bytes.toBytes("z")).build();
+
+ Path testDir = testUtil.getDataTestDir("testTableDescriptorHashGeneration");
+ HRegion region =
+ HBaseTestingUtility.createRegionAndWAL(regionInfo, testDir, conf, tableDescriptor);
+
+ try (MetricsRegionWrapperImpl wrapper = new MetricsRegionWrapperImpl(region)) {
+ String hash = wrapper.getTableDescriptorHash();
+ assertNotNull(hash);
+ assertNotEquals("unknown", hash);
+ assertEquals(8, hash.length());
+ } finally {
+ HBaseTestingUtility.closeRegionAndWAL(region);
+ }
+ }
+
+ @Test
+ public void testHashConsistency() throws Exception {
+ TableName tableName = TableName.valueOf("testTable2");
+ TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ RegionInfo regionInfo1 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
+ .setEndKey(Bytes.toBytes("m")).build();
+ RegionInfo regionInfo2 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("m"))
+ .setEndKey(Bytes.toBytes("z")).build();
+
+ Path testDir1 = testUtil.getDataTestDir("testHashConsistency1");
+ HRegion region1 =
+ HBaseTestingUtility.createRegionAndWAL(regionInfo1, testDir1, conf, tableDescriptor);
+
+ Path testDir2 = testUtil.getDataTestDir("testHashConsistency2");
+ HRegion region2 =
+ HBaseTestingUtility.createRegionAndWAL(regionInfo2, testDir2, conf, tableDescriptor);
+ try (MetricsRegionWrapperImpl wrapper1 = new MetricsRegionWrapperImpl(region1);
+ MetricsRegionWrapperImpl wrapper2 = new MetricsRegionWrapperImpl(region2)) {
+
+ String hash1 = wrapper1.getTableDescriptorHash();
+ String hash2 = wrapper2.getTableDescriptorHash();
+
+ assertEquals(hash1, hash2);
+ } finally {
+ HBaseTestingUtility.closeRegionAndWAL(region1);
+ HBaseTestingUtility.closeRegionAndWAL(region2);
+ }
+ }
+
+ @Test
+ public void testHashChangeOnDescriptorChange() throws Exception {
+ TableName tableName = TableName.valueOf("testTable3");
+ TableDescriptor tableDescriptor1 = TableDescriptorBuilder.newBuilder(tableName)
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+ TableDescriptor tableDescriptor2 = TableDescriptorBuilder.newBuilder(tableName)
+ .setColumnFamily(
+ ColumnFamilyDescriptorBuilder.newBuilder("cf".getBytes()).setTimeToLive(86400).build())
+ .build();
+
+ RegionInfo regionInfo1 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
+ .setEndKey(Bytes.toBytes("m")).build();
+ RegionInfo regionInfo2 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("m"))
+ .setEndKey(Bytes.toBytes("z")).build();
+
+ Path testDir1 = testUtil.getDataTestDir("testHashChangeOnDescriptorChange1");
+ HRegion region1 =
+ HBaseTestingUtility.createRegionAndWAL(regionInfo1, testDir1, conf, tableDescriptor1);
+
+ Path testDir2 = testUtil.getDataTestDir("testHashChangeOnDescriptorChange2");
+ HRegion region2 =
+ HBaseTestingUtility.createRegionAndWAL(regionInfo2, testDir2, conf, tableDescriptor2);
+
+ try (MetricsRegionWrapperImpl wrapper1 = new MetricsRegionWrapperImpl(region1);
+ MetricsRegionWrapperImpl wrapper2 = new MetricsRegionWrapperImpl(region2)) {
+ String hash1 = wrapper1.getTableDescriptorHash();
+ String hash2 = wrapper2.getTableDescriptorHash();
+
+ assertNotEquals(hash1, hash2);
+ } finally {
+ HBaseTestingUtility.closeRegionAndWAL(region1);
+ HBaseTestingUtility.closeRegionAndWAL(region2);
+ }
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index dba0c141952..81cbb031171 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
-
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -649,4 +649,21 @@ public class TestRegionServerMetrics {
assertEquals("Total zero-byte read bytes should be equal to 0", 0,
metricsRegionServer.getRegionServerWrapper().getZeroCopyBytesRead());
}
+
+ @Test
+ public void testTableDescriptorHashMetric() throws Exception {
+ doNPuts(1, false);
+ metricsRegionServer.getRegionServerWrapper().forceRecompute();
+
+ HRegion region = rs.getRegions(tableName).get(0);
+ assertNotNull("Region should exist", region);
+
+ try (MetricsRegionWrapperImpl wrapper = new MetricsRegionWrapperImpl(region)) {
+ String hash = wrapper.getTableDescriptorHash();
+
+ assertNotNull("TableDescriptorHash should not be null", hash);
+ assertNotEquals("TableDescriptorHash should not be 'UNKNOWN'",
"UNKNOWN", hash);
+ assertEquals("Hash should be 8 characters (CRC32 hex)", 8,
hash.length());
+ }
+ }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTableDescriptorHashComputation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTableDescriptorHashComputation.java
new file mode 100644
index 00000000000..9c793ef7fb4
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTableDescriptorHashComputation.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ RegionServerTests.class, SmallTests.class })
+public class TestTableDescriptorHashComputation {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestTableDescriptorHashComputation.class);
+
+ @Test
+ public void testHashLength() {
+ TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ String hash = td.getDescriptorHash();
+ assertNotNull(hash);
+ assertEquals(8, hash.length());
+ }
+
+ @Test
+ public void testIdenticalDescriptorsProduceSameHash() {
+ TableDescriptor td1 = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ TableDescriptor td2 = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ String hash1 = td1.getDescriptorHash();
+ String hash2 = td2.getDescriptorHash();
+
+ assertEquals(hash1, hash2);
+ }
+
+ @Test
+ public void testDifferentDescriptorsProduceDifferentHashes() {
+ TableDescriptor td1 = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ TableDescriptor td2 = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(
+ ColumnFamilyDescriptorBuilder.newBuilder("cf".getBytes()).setTimeToLive(86400).build())
+ .build();
+
+ String hash1 = td1.getDescriptorHash();
+ String hash2 = td2.getDescriptorHash();
+
+ assertNotEquals(hash1, hash2);
+ }
+
+ @Test
+ public void testDifferentCompressionProducesDifferentHash() {
+ TableDescriptor td1 = TableDescriptorBuilder
+ .newBuilder(TableName.valueOf("testTable")).setColumnFamily(ColumnFamilyDescriptorBuilder
+ .newBuilder("cf".getBytes()).setCompressionType(Compression.Algorithm.NONE).build())
+ .build();
+
+ TableDescriptor td2 = TableDescriptorBuilder
+ .newBuilder(TableName.valueOf("testTable")).setColumnFamily(ColumnFamilyDescriptorBuilder
+ .newBuilder("cf".getBytes()).setCompressionType(Compression.Algorithm.SNAPPY).build())
+ .build();
+
+ String hash1 = td1.getDescriptorHash();
+ String hash2 = td2.getDescriptorHash();
+
+ assertNotEquals(hash1, hash2);
+ }
+
+ @Test
+ public void testMultipleColumnFamilies() {
+ TableDescriptor td1 = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf1"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf2")).build();
+
+ TableDescriptor td2 = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf1")).build();
+
+ String hash1 = td1.getDescriptorHash();
+ String hash2 = td2.getDescriptorHash();
+
+ assertNotEquals(hash1, hash2);
+ }
+
+ @Test
+ public void testHashCaching() {
+ TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ String hash1 = td.getDescriptorHash();
+ String hash2 = td.getDescriptorHash();
+
+ assertNotNull(hash1);
+ assertEquals(hash1, hash2);
+ }
+}