This is an automated email from the ASF dual-hosted git repository.
cconnell pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/master by this push:
new 9805ddc7680 HBASE-29729 Add per-region table descriptor hash to regionServer JMX Metrics (#7481)
9805ddc7680 is described below
commit 9805ddc7680ce00f5cd9ae6a61f96b13db367cae
Author: Alex Hughes <[email protected]>
AuthorDate: Mon Dec 15 16:32:38 2025 +0100
HBASE-29729 Add per-region table descriptor hash to regionServer JMX Metrics (#7481)
Co-authored-by: Alex Hughes <[email protected]>
Signed-off-by: Ray Mattingly <[email protected]>
---
.../hadoop/hbase/client/TableDescriptor.java | 27 ++++
.../hbase/client/TableDescriptorBuilder.java | 13 ++
.../hbase/regionserver/MetricsRegionSource.java | 2 +
.../regionserver/MetricsRegionSourceImpl.java | 3 +
.../hbase/regionserver/MetricsRegionWrapper.java | 8 ++
.../regionserver/TestMetricsRegionSourceImpl.java | 5 +
.../regionserver/MetricsRegionWrapperImpl.java | 15 ++
.../regionserver/MetricsRegionWrapperStub.java | 5 +
...estMetricsRegionWrapperTableDescriptorHash.java | 155 +++++++++++++++++++++
.../regionserver/TestRegionServerMetrics.java | 18 +++
.../TestTableDescriptorHashComputation.java | 127 +++++++++++++++++
11 files changed, 378 insertions(+)
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
index 817f9e2d4b1..1a0c82390bb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.client;
+import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
@@ -25,10 +26,16 @@ import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Stream;
+import java.util.zip.CRC32;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
/**
* TableDescriptor contains the details about an HBase table such as the descriptors of all the
@@ -39,6 +46,8 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Public
public interface TableDescriptor {
+ Logger LOG = LoggerFactory.getLogger(TableDescriptor.class);
+
@InterfaceAudience.Private
Comparator<TableDescriptor> COMPARATOR = getComparator(ColumnFamilyDescriptor.COMPARATOR);
@@ -316,4 +325,22 @@ public interface TableDescriptor {
}
return !enabled;
}
+
+ /**
+ * Computes a CRC32 hash of the table descriptor's protobuf representation. This hash can be used
+ * to detect changes in the table descriptor configuration.
+ * @return A hex string representation of the CRC32 hash, or "UNKNOWN" if computation fails
+ */
+ default String getDescriptorHash() {
+ try {
+ HBaseProtos.TableSchema tableSchema = ProtobufUtil.toTableSchema(this);
+ ByteBuffer byteBuffer = ByteBuffer.wrap(tableSchema.toByteArray());
+ CRC32 crc32 = new CRC32();
+ crc32.update(byteBuffer);
+ return Long.toHexString(crc32.getValue());
+ } catch (Exception e) {
+ LOG.error("Failed to compute table descriptor hash for table {}", getTableName(), e);
+ return "UNKNOWN";
+ }
+ }
}
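
For reference, the hashing scheme introduced above amounts to a CRC32 over the
serialized TableSchema protobuf, rendered as lowercase hex. A minimal standalone
sketch of the same computation (class and method names here are illustrative,
not part of the patch):

    import java.util.zip.CRC32;

    public final class DescriptorHashSketch {
      // CRC32 over the serialized protobuf bytes, as in getDescriptorHash().
      static String crc32Hex(byte[] serializedTableSchema) {
        CRC32 crc32 = new CRC32();
        crc32.update(serializedTableSchema, 0, serializedTableSchema.length);
        // Note: Long.toHexString() omits leading zeros, so the result is
        // up to eight hex characters for a 32-bit CRC value.
        return Long.toHexString(crc32.getValue());
      }
    }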
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
index 8636b006e83..eed1a40a2c2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
@@ -582,6 +582,11 @@ public class TableDescriptorBuilder {
private final Map<byte[], ColumnFamilyDescriptor> families =
new TreeMap<>(Bytes.BYTES_RAWCOMPARATOR);
+ /**
+ * Cached hash of the table descriptor. Computed lazily on first access.
+ */
+ private volatile String descriptorHash;
+
/**
* Construct a table descriptor specifying a TableName object
* @param name Table name.
@@ -1510,6 +1515,14 @@ public class TableDescriptorBuilder {
return Optional.empty();
}
}
+
+ @Override
+ public String getDescriptorHash() {
+ if (descriptorHash == null) {
+ descriptorHash = TableDescriptor.super.getDescriptorHash();
+ }
+ return descriptorHash;
+ }
}
/**
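
The caching above is the usual racy-but-idempotent lazy-initialization idiom:
the field is volatile, so if two threads race on first access they may both
compute the hash, but they compute the same value and either write publishes
safely. A minimal sketch of the pattern (names illustrative):

    public final class LazyHashCache {
      private volatile String hash;

      public String getHash() {
        String h = hash; // single volatile read
        if (h == null) {
          h = compute(); // may run more than once under contention; harmless
          hash = h;
        }
        return h;
      }

      private String compute() {
        return "deadbeef"; // stand-in for the CRC32 computation
      }
    }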
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java
index 580b4a7624f..1f8238221ce 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java
@@ -56,6 +56,8 @@ public interface MetricsRegionSource extends Comparable<MetricsRegionSource> {
String ROW_READS_ONLY_ON_MEMSTORE_DESC = "Row reads happening completely out of memstore";
String MIXED_ROW_READS = "mixedRowReadsCount";
String MIXED_ROW_READS_ON_STORE_DESC = "Row reads happening out of files and memstore on store";
+ String TABLE_DESCRIPTOR_HASH = "tableDescriptorHash";
+ String TABLE_DESCRIPTOR_HASH_DESC = "The hash of the current table descriptor";
/**
* Close the region's metrics as this region is closing.
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
index 2f4fbb431ab..391849cae66 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
@@ -286,6 +286,9 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource {
MetricsRegionSource.ROW_READS_ONLY_ON_MEMSTORE_DESC);
addCounter(mrb, this.regionWrapper.getMixedRowReadsCount(), MetricsRegionSource.MIXED_ROW_READS,
MetricsRegionSource.MIXED_ROW_READS_ON_STORE_DESC);
+ mrb.add(Interns.tag(regionNamePrefix + MetricsRegionSource.TABLE_DESCRIPTOR_HASH,
+ MetricsRegionSource.TABLE_DESCRIPTOR_HASH_DESC, this.regionWrapper.getTableDescriptorHash()));
}
}
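
Once emitted, the tag surfaces through the Hadoop metrics2 JMX adapter alongside
the other per-region tags. A hedged sketch of probing it in-process with standard
javax.management APIs follows; the ObjectName below and the exact attribute
naming (metrics2 typically prefixes tag attributes and prepends the region name)
are assumptions about the JMX mapping, not something this patch defines:

    import java.lang.management.ManagementFactory;
    import javax.management.MBeanAttributeInfo;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    public final class DescriptorHashJmxProbe {
      public static void main(String[] args) throws Exception {
        MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        // Assumed MBean name for per-region metrics on a region server.
        ObjectName regions = new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=Regions");
        // Scan all attributes and print any that carry the new tag.
        for (MBeanAttributeInfo attr : server.getMBeanInfo(regions).getAttributes()) {
          if (attr.getName().endsWith("tableDescriptorHash")) {
            System.out.println(attr.getName() + " = " + server.getAttribute(regions, attr.getName()));
          }
        }
      }
    }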
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java
index 3445faf7eaa..8908d73d729 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java
@@ -166,4 +166,12 @@ public interface MetricsRegionWrapper {
/** Returns the number of row reads on memstore and file per store */
Map<String, Long> getMixedRowReadsCount();
+ /**
+ * Returns a hash of the table descriptor that this region was opened with. This hash
+ * identifies the table configuration (column families, compression, TTL, block size, etc.) and
+ * can be used to determine if a region needs to be reopened to pick up descriptor changes.
+ * @return hex-encoded hash of the serialized TableDescriptor
+ */
+ String getTableDescriptorHash();
+
}
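
The reopen check described in the javadoc reduces to a hash comparison. A minimal
sketch, assuming the caller already has the region's opened-with hash (e.g. read
from the new JMX tag, elided here) and the table's current descriptor:

    import org.apache.hadoop.hbase.client.TableDescriptor;

    public final class DescriptorDriftCheck {
      // True if the region was opened with an older descriptor and should be
      // reopened to pick up the current schema.
      static boolean needsReopen(String openedWithHash, TableDescriptor current) {
        return !openedWithHash.equals(current.getDescriptorHash());
      }
    }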
diff --git a/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java b/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
index 28286cc1c6e..38725c09c58 100644
--- a/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
+++ b/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
@@ -237,5 +237,10 @@ public class TestMetricsRegionSourceImpl {
map.put("info", 0L);
return map;
}
+
+ @Override
+ public String getTableDescriptorHash() {
+ return "testhash";
+ }
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
index b3e2e93fc9e..c635f2950bc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
@@ -64,9 +64,11 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
private ScheduledFuture<?> regionMetricsUpdateTask;
private float currentRegionCacheRatio;
+ private final String tableDescriptorHash;
public MetricsRegionWrapperImpl(HRegion region) {
this.region = region;
+ this.tableDescriptorHash = determineTableDescriptorHash();
this.executor =
CompatibilitySingletonFactory.getInstance(MetricsExecutor.class).getExecutor();
this.runnable = new HRegionMetricsWrapperRunnable();
this.regionMetricsUpdateTask =
@@ -357,6 +359,19 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
}
}
+ @Override
+ public String getTableDescriptorHash() {
+ return tableDescriptorHash;
+ }
+
+ private String determineTableDescriptorHash() {
+ TableDescriptor tableDesc = this.region.getTableDescriptor();
+ if (tableDesc == null) {
+ return UNKNOWN;
+ }
+ return tableDesc.getDescriptorHash();
+ }
+
@Override
public void close() throws IOException {
regionMetricsUpdateTask.cancel(true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java
index c88e2deee4a..9db7777f981 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java
@@ -203,4 +203,9 @@ public class MetricsRegionWrapperStub implements MetricsRegionWrapper {
map.put("info", 0L);
return map;
}
+
+ @Override
+ public String getTableDescriptorHash() {
+ return "testhash123abc";
+ }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionWrapperTableDescriptorHash.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionWrapperTableDescriptorHash.java
new file mode 100644
index 00000000000..77aa9387960
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionWrapperTableDescriptorHash.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtil;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ RegionServerTests.class, SmallTests.class })
+public class TestMetricsRegionWrapperTableDescriptorHash {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestMetricsRegionWrapperTableDescriptorHash.class);
+
+ private HBaseTestingUtil testUtil;
+ private Configuration conf;
+
+ @Before
+ public void setUp() throws Exception {
+ conf = HBaseConfiguration.create();
+ testUtil = new HBaseTestingUtil(conf);
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ if (testUtil != null) {
+ testUtil.cleanupTestDir();
+ }
+ }
+
+ @Test
+ public void testTableDescriptorHashGeneration() throws Exception {
+ TableName tableName = TableName.valueOf("testTable");
+ TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ RegionInfo regionInfo = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
+ .setEndKey(Bytes.toBytes("z")).build();
+
+ Path testDir = testUtil.getDataTestDir("testTableDescriptorHashGeneration");
+ HRegion region =
+ HBaseTestingUtil.createRegionAndWAL(regionInfo, testDir, conf, tableDescriptor);
+
+ try (MetricsRegionWrapperImpl wrapper = new MetricsRegionWrapperImpl(region)) {
+ String hash = wrapper.getTableDescriptorHash();
+ assertNotNull(hash);
+ assertNotEquals("unknown", hash);
+ assertEquals(8, hash.length());
+ } finally {
+ HBaseTestingUtil.closeRegionAndWAL(region);
+ }
+ }
+
+ @Test
+ public void testHashConsistency() throws Exception {
+ TableName tableName = TableName.valueOf("testTable2");
+ TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ RegionInfo regionInfo1 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
+ .setEndKey(Bytes.toBytes("m")).build();
+ RegionInfo regionInfo2 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("m"))
+ .setEndKey(Bytes.toBytes("z")).build();
+
+ Path testDir1 = testUtil.getDataTestDir("testHashConsistency1");
+ HRegion region1 =
+ HBaseTestingUtil.createRegionAndWAL(regionInfo1, testDir1, conf, tableDescriptor);
+
+ Path testDir2 = testUtil.getDataTestDir("testHashConsistency2");
+ HRegion region2 =
+ HBaseTestingUtil.createRegionAndWAL(regionInfo2, testDir2, conf, tableDescriptor);
+ try (MetricsRegionWrapperImpl wrapper1 = new MetricsRegionWrapperImpl(region1);
+ MetricsRegionWrapperImpl wrapper2 = new MetricsRegionWrapperImpl(region2)) {
+
+ String hash1 = wrapper1.getTableDescriptorHash();
+ String hash2 = wrapper2.getTableDescriptorHash();
+
+ assertEquals(hash1, hash2);
+ } finally {
+ HBaseTestingUtil.closeRegionAndWAL(region1);
+ HBaseTestingUtil.closeRegionAndWAL(region2);
+ }
+ }
+
+ @Test
+ public void testHashChangeOnDescriptorChange() throws Exception {
+ TableName tableName = TableName.valueOf("testTable3");
+ TableDescriptor tableDescriptor1 = TableDescriptorBuilder.newBuilder(tableName)
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+ TableDescriptor tableDescriptor2 = TableDescriptorBuilder.newBuilder(tableName)
+ .setColumnFamily(
+ ColumnFamilyDescriptorBuilder.newBuilder("cf".getBytes()).setTimeToLive(86400).build())
+ .build();
+
+ RegionInfo regionInfo1 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
+ .setEndKey(Bytes.toBytes("m")).build();
+ RegionInfo regionInfo2 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("m"))
+ .setEndKey(Bytes.toBytes("z")).build();
+
+ Path testDir1 = testUtil.getDataTestDir("testHashChangeOnDescriptorChange1");
+ HRegion region1 =
+ HBaseTestingUtil.createRegionAndWAL(regionInfo1, testDir1, conf, tableDescriptor1);
+
+ Path testDir2 = testUtil.getDataTestDir("testHashChangeOnDescriptorChange2");
+ HRegion region2 =
+ HBaseTestingUtil.createRegionAndWAL(regionInfo2, testDir2, conf, tableDescriptor2);
+
+ try (MetricsRegionWrapperImpl wrapper1 = new MetricsRegionWrapperImpl(region1);
+ MetricsRegionWrapperImpl wrapper2 = new MetricsRegionWrapperImpl(region2)) {
+ String hash1 = wrapper1.getTableDescriptorHash();
+ String hash2 = wrapper2.getTableDescriptorHash();
+
+ assertNotEquals(hash1, hash2);
+ } finally {
+ HBaseTestingUtil.closeRegionAndWAL(region1);
+ HBaseTestingUtil.closeRegionAndWAL(region2);
+ }
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index d9028700f9f..97ee508cc29 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
@@ -598,4 +599,21 @@ public class TestRegionServerMetrics {
assertEquals("Total zero-byte read bytes should be equal to 0", 0,
metricsRegionServer.getRegionServerWrapper().getZeroCopyBytesRead());
}
+
+ @Test
+ public void testTableDescriptorHashMetric() throws Exception {
+ doNPuts(1, false);
+ metricsRegionServer.getRegionServerWrapper().forceRecompute();
+
+ HRegion region = rs.getRegions(tableName).get(0);
+ assertNotNull("Region should exist", region);
+
+ try (MetricsRegionWrapperImpl wrapper = new MetricsRegionWrapperImpl(region)) {
+ String hash = wrapper.getTableDescriptorHash();
+
+ assertNotNull("TableDescriptorHash should not be null", hash);
+ assertNotEquals("TableDescriptorHash should not be 'UNKNOWN'", "UNKNOWN", hash);
+ assertEquals("Hash should be 8 characters (CRC32 hex)", 8, hash.length());
+ }
+ }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTableDescriptorHashComputation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTableDescriptorHashComputation.java
new file mode 100644
index 00000000000..9c793ef7fb4
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTableDescriptorHashComputation.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ RegionServerTests.class, SmallTests.class })
+public class TestTableDescriptorHashComputation {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestTableDescriptorHashComputation.class);
+
+ @Test
+ public void testHashLength() {
+ TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ String hash = td.getDescriptorHash();
+ assertNotNull(hash);
+ assertEquals(8, hash.length());
+ }
+
+ @Test
+ public void testIdenticalDescriptorsProduceSameHash() {
+ TableDescriptor td1 = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ TableDescriptor td2 = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ String hash1 = td1.getDescriptorHash();
+ String hash2 = td2.getDescriptorHash();
+
+ assertEquals(hash1, hash2);
+ }
+
+ @Test
+ public void testDifferentDescriptorsProduceDifferentHashes() {
+ TableDescriptor td1 = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ TableDescriptor td2 = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(
+ ColumnFamilyDescriptorBuilder.newBuilder("cf".getBytes()).setTimeToLive(86400).build())
+ .build();
+
+ String hash1 = td1.getDescriptorHash();
+ String hash2 = td2.getDescriptorHash();
+
+ assertNotEquals(hash1, hash2);
+ }
+
+ @Test
+ public void testDifferentCompressionProducesDifferentHash() {
+ TableDescriptor td1 = TableDescriptorBuilder
+ .newBuilder(TableName.valueOf("testTable")).setColumnFamily(ColumnFamilyDescriptorBuilder
+ .newBuilder("cf".getBytes()).setCompressionType(Compression.Algorithm.NONE).build())
+ .build();
+
+ TableDescriptor td2 = TableDescriptorBuilder
+ .newBuilder(TableName.valueOf("testTable")).setColumnFamily(ColumnFamilyDescriptorBuilder
+ .newBuilder("cf".getBytes()).setCompressionType(Compression.Algorithm.SNAPPY).build())
+ .build();
+
+ String hash1 = td1.getDescriptorHash();
+ String hash2 = td2.getDescriptorHash();
+
+ assertNotEquals(hash1, hash2);
+ }
+
+ @Test
+ public void testMultipleColumnFamilies() {
+ TableDescriptor td1 = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf1"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf2")).build();
+
+ TableDescriptor td2 = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf1")).build();
+
+ String hash1 = td1.getDescriptorHash();
+ String hash2 = td2.getDescriptorHash();
+
+ assertNotEquals(hash1, hash2);
+ }
+
+ @Test
+ public void testHashCaching() {
+ TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("testTable"))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
+
+ String hash1 = td.getDescriptorHash();
+ String hash2 = td.getDescriptorHash();
+
+ assertNotNull(hash1);
+ assertEquals(hash1, hash2);
+ }
+}