http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
index 4441c08..32c08a9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.util.StreamUtils;
@@ -267,7 +268,7 @@ public class VisibilityUtils {
   public static Filter createVisibilityLabelFilter(Region region, Authorizations authorizations)
       throws IOException {
     Map<ByteRange, Integer> cfVsMaxVersions = new HashMap<>();
-    for (HColumnDescriptor hcd : region.getTableDesc().getFamilies()) {
+    for (ColumnFamilyDescriptor hcd : region.getTableDescriptor().getColumnFamilies()) {
       cfVsMaxVersions.put(new SimpleMutableByteRange(hcd.getName()), hcd.getMaxVersions());
     }
     VisibilityLabelService vls = VisibilityLabelServiceManager.getInstance()

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
index 7ba5312..86687d9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
@@ -216,7 +216,7 @@ public final class SnapshotManifest {
 
     for (Store store : region.getStores()) {
       // 2.1. build the snapshot reference for the store
-      Object familyData = visitor.familyOpen(regionData, store.getFamily().getName());
+      Object familyData = visitor.familyOpen(regionData, store.getColumnFamilyDescriptor().getName());
       monitor.rethrowException();
 
       List<StoreFile> storeFiles = new ArrayList<>(store.getStorefiles());

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
index 1e1aa9a..3316ec5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
@@ -71,6 +71,7 @@ import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Get;
@@ -79,6 +80,7 @@ import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.tool.Canary.RegionTask.TaskType;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -117,14 +119,14 @@ public final class Canary implements Tool {
     public long getReadFailureCount();
     public long incReadFailureCount();
    public void publishReadFailure(ServerName serverName, HRegionInfo region, Exception e);
-    public void publishReadFailure(ServerName serverName, HRegionInfo region, HColumnDescriptor column, Exception e);
+    public void publishReadFailure(ServerName serverName, HRegionInfo region, ColumnFamilyDescriptor column, Exception e);
    public void updateReadFailedHostList(HRegionInfo region, String serverName);
    public Map<String,String> getReadFailures();
-    public void publishReadTiming(ServerName serverName, HRegionInfo region, HColumnDescriptor column, long msTime);
+    public void publishReadTiming(ServerName serverName, HRegionInfo region, ColumnFamilyDescriptor column, long msTime);
    public long getWriteFailureCount();
    public void publishWriteFailure(ServerName serverName, HRegionInfo region, Exception e);
-    public void publishWriteFailure(ServerName serverName, HRegionInfo region, HColumnDescriptor column, Exception e);
-    public void publishWriteTiming(ServerName serverName, HRegionInfo region, HColumnDescriptor column, long msTime);
+    public void publishWriteFailure(ServerName serverName, HRegionInfo region, ColumnFamilyDescriptor column, Exception e);
+    public void publishWriteTiming(ServerName serverName, HRegionInfo region, ColumnFamilyDescriptor column, long msTime);
    public void updateWriteFailedHostList(HRegionInfo region, String serverName);
    public Map<String,String> getWriteFailures();
   }
@@ -161,7 +163,7 @@ public final class Canary implements Tool {
     }
 
     @Override
-    public void publishReadFailure(ServerName serverName, HRegionInfo region, HColumnDescriptor column, Exception e) {
+    public void publishReadFailure(ServerName serverName, HRegionInfo region, ColumnFamilyDescriptor column, Exception e) {
      readFailureCount.incrementAndGet();
      LOG.error(String.format("read from region %s on regionserver %s column family %s failed",
                region.getRegionNameAsString(), serverName, column.getNameAsString()), e);
@@ -173,7 +175,7 @@ public final class Canary implements Tool {
     }
 
     @Override
-    public void publishReadTiming(ServerName serverName, HRegionInfo region, HColumnDescriptor column, long msTime) {
+    public void publishReadTiming(ServerName serverName, HRegionInfo region, ColumnFamilyDescriptor column, long msTime) {
      LOG.info(String.format("read from region %s on regionserver %s column family %s in %dms",
        region.getRegionNameAsString(), serverName, column.getNameAsString(), msTime));
     }
@@ -200,14 +202,14 @@ public final class Canary implements Tool {
     }
 
     @Override
-    public void publishWriteFailure(ServerName serverName, HRegionInfo region, HColumnDescriptor column, Exception e) {
+    public void publishWriteFailure(ServerName serverName, HRegionInfo region, ColumnFamilyDescriptor column, Exception e) {
      writeFailureCount.incrementAndGet();
      LOG.error(String.format("write to region %s on regionserver %s column family %s failed",
        region.getRegionNameAsString(), serverName, column.getNameAsString()), e);
     }
 
     @Override
-    public void publishWriteTiming(ServerName serverName, HRegionInfo region, HColumnDescriptor column, long msTime) {
+    public void publishWriteTiming(ServerName serverName, HRegionInfo region, ColumnFamilyDescriptor column, long msTime) {
      LOG.info(String.format("write to region %s on regionserver %s column family %s in %dms",
        region.getRegionNameAsString(), serverName, column.getNameAsString(), msTime));
     }
@@ -348,14 +350,14 @@ public final class Canary implements Tool {
 
     public Void read() {
       Table table = null;
-      HTableDescriptor tableDesc = null;
+      TableDescriptor tableDesc = null;
       try {
         if (LOG.isDebugEnabled()) {
           LOG.debug(String.format("reading table descriptor for table %s",
             region.getTable()));
         }
         table = connection.getTable(region.getTable());
-        tableDesc = table.getTableDescriptor();
+        tableDesc = table.getDescriptor();
       } catch (IOException e) {
         LOG.debug("sniffRegion failed", e);
         sink.publishReadFailure(serverName, region, e);
@@ -374,7 +376,7 @@ public final class Canary implements Tool {
       Scan scan = null;
       ResultScanner rs = null;
       StopWatch stopWatch = new StopWatch();
-      for (HColumnDescriptor column : tableDesc.getColumnFamilies()) {
+      for (ColumnFamilyDescriptor column : tableDesc.getColumnFamilies()) {
         stopWatch.reset();
         startKey = region.getStartKey();
        // Can't do a get on empty start row so do a Scan of first element if any instead.
@@ -439,17 +441,17 @@ public final class Canary implements Tool {
      */
     private Void write() {
       Table table = null;
-      HTableDescriptor tableDesc = null;
+      TableDescriptor tableDesc = null;
       try {
         table = connection.getTable(region.getTable());
-        tableDesc = table.getTableDescriptor();
+        tableDesc = table.getDescriptor();
         byte[] rowToCheck = region.getStartKey();
         if (rowToCheck.length == 0) {
           rowToCheck = new byte[]{0x0};
         }
        int writeValueSize =
            connection.getConfiguration().getInt(HConstants.HBASE_CANARY_WRITE_VALUE_SIZE_KEY, 10);
-        for (HColumnDescriptor column : tableDesc.getColumnFamilies()) {
+        for (ColumnFamilyDescriptor column : tableDesc.getColumnFamilies()) {
           Put put = new Put(rowToCheck);
           byte[] value = new byte[writeValueSize];
           Bytes.random(value);

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
index 87ff010..1e56e7f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
@@ -43,12 +43,12 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
@@ -66,6 +66,8 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 
 /**
  * The {@link RegionSplitter} class provides several utilities to help in the
@@ -378,16 +380,16 @@ public class RegionSplitter {
     LOG.debug("Creating table " + tableName + " with " + columnFamilies.length
         + " column families.  Presplitting to " + splitCount + " regions");
 
-    HTableDescriptor desc = new HTableDescriptor(tableName);
+    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
     for (String cf : columnFamilies) {
-      desc.addFamily(new HColumnDescriptor(Bytes.toBytes(cf)));
+      builder.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(cf)).build());
     }
     try (Connection connection = ConnectionFactory.createConnection(conf)) {
       Admin admin = connection.getAdmin();
       try {
         Preconditions.checkArgument(!admin.tableExists(tableName),
           "Table already exists: " + tableName);
-        admin.createTable(desc, splitAlgo.split(splitCount));
+        admin.createTable(builder.build(), splitAlgo.split(splitCount));
       } finally {
         admin.close();
       }
@@ -684,9 +686,9 @@ public class RegionSplitter {
     FileSystem fs = tableDir.getFileSystem(connection.getConfiguration());
     // Clear the cache to forcibly refresh region information
     ((ClusterConnection)connection).clearRegionCache();
-    HTableDescriptor htd = null;
+    TableDescriptor htd = null;
     try (Table table = connection.getTable(tableName)) {
-      htd = table.getTableDescriptor();
+      htd = table.getDescriptor();
     }
    try (RegionLocator regionLocator = connection.getRegionLocator(tableName)) {
 
@@ -725,7 +727,7 @@ public class RegionSplitter {
 
            // Check every Column Family for that region -- check does not have references.
             boolean refFound = false;
-            for (HColumnDescriptor c : htd.getFamilies()) {
+            for (ColumnFamilyDescriptor c : htd.getColumnFamilies()) {
               if ((refFound = regionFs.hasReferences(c.getNameAsString()))) {
                 break;
               }

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
index 648ccc6..34a9759 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
@@ -97,7 +97,7 @@ public class ServerRegionReplicaUtil extends RegionReplicaUtil {
    * @return whether the replica is read only
    */
   public static boolean isReadOnly(HRegion region) {
-    return region.getTableDesc().isReadOnly()
+    return region.getTableDescriptor().isReadOnly()
       || !isDefaultReplica(region.getRegionInfo());
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 5c8b29b..24a2f9c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -76,6 +76,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableState;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.compress.Compression;
@@ -137,6 +138,9 @@ import org.apache.zookeeper.ZooKeeper;
 import org.apache.zookeeper.ZooKeeper.States;
 
 import edu.umd.cs.findbugs.annotations.Nullable;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 
 /**
  * Facility for testing HBase. Replacement for
@@ -1360,7 +1364,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * Create a table.
    * @param tableName
    * @param family
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createTable(TableName tableName, String family)
@@ -1372,7 +1376,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * Create a table.
    * @param tableName
    * @param families
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createTable(TableName tableName, String[] families)
@@ -1388,7 +1392,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * Create a table.
    * @param tableName
    * @param family
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createTable(TableName tableName, byte[] family)
@@ -1401,7 +1405,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param tableName
    * @param family
    * @param numRegions
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createMultiRegionTable(TableName tableName, byte[] family, int 
numRegions)
@@ -1418,7 +1422,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * Create a table.
    * @param tableName
    * @param families
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createTable(TableName tableName, byte[][] families)
@@ -1430,7 +1434,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * Create a table with multiple regions.
    * @param tableName
    * @param families
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createMultiRegionTable(TableName tableName, byte[][] families) 
throws IOException {
@@ -1442,7 +1446,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param tableName
    * @param families
    * @param splitKeys
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createTable(TableName tableName, byte[][] families, byte[][] 
splitKeys)
@@ -1467,11 +1471,27 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param htd
    * @param families
    * @param c Configuration to use
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createTable(TableDescriptor, byte[][], Configuration)}
    */
+  @Deprecated
   public Table createTable(HTableDescriptor htd, byte[][] families, Configuration c)
   throws IOException {
+    return createTable((TableDescriptor) htd, families, c);
+  }
+
+  /**
+   * Create a table.
+   * @param htd
+   * @param families
+   * @param c Configuration to use
+   * @return A Table instance for the created table.
+   * @throws IOException
+   */
+  public Table createTable(TableDescriptor htd, byte[][] families, Configuration c)
+  throws IOException {
     return createTable(htd, families, null, c);
   }
 
@@ -1481,35 +1501,69 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param families
    * @param splitKeys
    * @param c Configuration to use
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createTable(TableDescriptor, byte[][], byte[][], 
Configuration)}
    */
+  @Deprecated
   public Table createTable(HTableDescriptor htd, byte[][] families, byte[][] splitKeys,
       Configuration c) throws IOException {
+    return createTable((TableDescriptor) htd, families, splitKeys, c);
+  }
+
+  /**
+   * Create a table.
+   * @param htd
+   * @param families
+   * @param splitKeys
+   * @param c Configuration to use
+   * @return A Table instance for the created table.
+   * @throws IOException
+   */
+  public Table createTable(TableDescriptor htd, byte[][] families, byte[][] splitKeys,
+      Configuration c) throws IOException {
+    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(htd);
     for (byte[] family : families) {
-      HColumnDescriptor hcd = new HColumnDescriptor(family);
      // Disable blooms (they are on by default as of 0.95) but we disable them here because
      // tests have hard coded counts of what to expect in block cache, etc., and blooms being
      // on is interfering.
-      hcd.setBloomFilterType(BloomType.NONE);
-      htd.addFamily(hcd);
+      builder.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family)
+                              .setBloomFilterType(BloomType.NONE)
+                              .build());
     }
-    getAdmin().createTable(htd, splitKeys);
+    TableDescriptor td = builder.build();
+    getAdmin().createTable(td, splitKeys);
     // HBaseAdmin only waits for regions to appear in hbase:meta
     // we should wait until they are assigned
-    waitUntilAllRegionsAssigned(htd.getTableName());
-    return getConnection().getTable(htd.getTableName());
+    waitUntilAllRegionsAssigned(td.getTableName());
+    return getConnection().getTable(td.getTableName());
   }
 
   /**
    * Create a table.
    * @param htd
    * @param splitRows
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createTable(TableDescriptor, byte[][])}
    */
+  @Deprecated
   public Table createTable(HTableDescriptor htd, byte[][] splitRows)
       throws IOException {
+    return createTable((TableDescriptor) htd, splitRows);
+  }
+
+  /**
+   * Create a table.
+   * @param htd
+   * @param splitRows
+   * @return A Table instance for the created table.
+   * @throws IOException
+   */
+  public Table createTable(TableDescriptor htd, byte[][] splitRows)
+      throws IOException {
     getAdmin().createTable(htd, splitRows);
     // HBaseAdmin only waits for regions to appear in hbase:meta
     // we should wait until they are assigned
@@ -1523,7 +1577,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param families
    * @param splitKeys
    * @param c Configuration to use
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createTable(TableName tableName, byte[][] families, byte[][] splitKeys,
@@ -1536,7 +1590,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param tableName
    * @param family
    * @param numVersions
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createTable(TableName tableName, byte[] family, int numVersions)
@@ -1549,7 +1603,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param tableName
    * @param families
    * @param numVersions
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createTable(TableName tableName, byte[][] families, int numVersions)
@@ -1563,7 +1617,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param families
    * @param numVersions
    * @param splitKeys
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createTable(TableName tableName, byte[][] families, int numVersions,
@@ -1585,7 +1639,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param tableName
    * @param families
    * @param numVersions
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createMultiRegionTable(TableName tableName, byte[][] families, int numVersions)
@@ -1599,7 +1653,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param families
    * @param numVersions
    * @param blockSize
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createTable(TableName tableName, byte[][] families,
@@ -1642,7 +1696,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param tableName
    * @param families
    * @param numVersions
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createTable(TableName tableName, byte[][] families,
@@ -1668,7 +1722,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param tableName
    * @param family
    * @param splitRows
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createTable(TableName tableName, byte[] family, byte[][] splitRows)
@@ -1687,7 +1741,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * Create a table with multiple regions.
    * @param tableName
    * @param family
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public Table createMultiRegionTable(TableName tableName, byte[] family) throws IOException {
@@ -1696,11 +1750,22 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
 
   /**
   * Modify a table, synchronous. Waiting logic similar to that of {@code admin.rb#alter_status}.
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #modifyTableSync(Admin, TableDescriptor)}
    */
+  @Deprecated
   @SuppressWarnings("serial")
   public static void modifyTableSync(Admin admin, HTableDescriptor desc)
       throws IOException, InterruptedException {
-    admin.modifyTable(desc.getTableName(), desc);
+    modifyTableSync(admin, (TableDescriptor) desc);
+  }
+  /**
+   * Modify a table, synchronous. Waiting logic similar to that of {@code admin.rb#alter_status}.
+   */
+  @SuppressWarnings("serial")
+  public static void modifyTableSync(Admin admin, TableDescriptor desc)
+      throws IOException, InterruptedException {
+    admin.modifyTable(desc);
     Pair<Integer, Integer> status = new Pair<Integer, Integer>() {{
       setFirst(0);
       setSecond(0);
@@ -1840,8 +1905,24 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param endKey
    * @return
    * @throws IOException
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createLocalHRegion(TableDescriptor, byte[], byte[])}
    */
+  @Deprecated
   public HRegion createLocalHRegion(HTableDescriptor desc, byte [] startKey,
+      byte [] endKey) throws IOException {
+    return createLocalHRegion((TableDescriptor) desc, startKey, endKey);
+  }
+
+  /**
+   * Create an HRegion that writes to the local tmp dirs
+   * @param desc
+   * @param startKey
+   * @param endKey
+   * @return
+   * @throws IOException
+   */
+  public HRegion createLocalHRegion(TableDescriptor desc, byte [] startKey,
       byte [] endKey)
   throws IOException {
     HRegionInfo hri = new HRegionInfo(desc.getTableName(), startKey, endKey);
@@ -1851,8 +1932,19 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
   /**
   * Create an HRegion that writes to the local tmp dirs. Creates the WAL for you. Be sure to call
   * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when you're finished with it.
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createLocalHRegion(HRegionInfo, TableDescriptor)}
    */
+  @Deprecated
   public HRegion createLocalHRegion(HRegionInfo info, HTableDescriptor desc) throws IOException {
+    return createLocalHRegion(info, (TableDescriptor) desc);
+  }
+
+  /**
+   * Create an HRegion that writes to the local tmp dirs. Creates the WAL for you. Be sure to call
+   * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when you're finished with it.
+   */
+  public HRegion createLocalHRegion(HRegionInfo info, TableDescriptor desc) throws IOException {
     return createRegionAndWAL(info, getDataTestDir(), getConfiguration(), desc);
   }
 
@@ -1863,9 +1955,25 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param wal wal for this region.
    * @return created hregion
    * @throws IOException
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createLocalHRegion(HRegionInfo, TableDescriptor, WAL)}
    */
+  @Deprecated
   public HRegion createLocalHRegion(HRegionInfo info, HTableDescriptor desc, WAL wal)
       throws IOException {
+    return createLocalHRegion(info, (TableDescriptor) desc, wal);
+  }
+
+  /**
+   * Create an HRegion that writes to the local tmp dirs with specified wal
+   * @param info regioninfo
+   * @param desc table descriptor
+   * @param wal wal for this region.
+   * @return created hregion
+   * @throws IOException
+   */
+  public HRegion createLocalHRegion(HRegionInfo info, TableDescriptor desc, WAL wal)
+      throws IOException {
     return HRegion.createHRegion(info, getDataTestDir(), getConfiguration(), desc, wal);
   }
 
@@ -2375,9 +2483,26 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param startKeys
    * @return list of region info for regions added to meta
    * @throws IOException
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createMultiRegionsInMeta(Configuration, TableDescriptor, byte[][])}
+   */
+  @Deprecated
+  public List<HRegionInfo> createMultiRegionsInMeta(final Configuration conf,
+      final HTableDescriptor htd, byte [][] startKeys) throws IOException {
+    return createMultiRegionsInMeta(conf, (TableDescriptor) htd, startKeys);
+  }
+  /**
+   * Create rows in hbase:meta for regions of the specified table with the specified
+   * start keys.  The first startKey should be a 0 length byte array if you
+   * want to form a proper range of regions.
+   * @param conf
+   * @param htd
+   * @param startKeys
+   * @return list of region info for regions added to meta
+   * @throws IOException
    */
   public List<HRegionInfo> createMultiRegionsInMeta(final Configuration conf,
-      final HTableDescriptor htd, byte [][] startKeys)
+      final TableDescriptor htd, byte [][] startKeys)
   throws IOException {
     Table meta = getConnection().getTable(TableName.META_TABLE_NAME);
     Arrays.sort(startKeys, Bytes.BYTES_COMPARATOR);
@@ -2415,19 +2540,44 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
   /**
    * Create a region with it's own WAL. Be sure to call
    * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} to clean up all 
resources.
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createRegionAndWAL(HRegionInfo, Path, 
Configuration, TableDescriptor)}
    */
+  @Deprecated
   public static HRegion createRegionAndWAL(final HRegionInfo info, final Path 
rootDir,
       final Configuration conf, final HTableDescriptor htd) throws IOException 
{
+    return createRegionAndWAL(info, rootDir, conf, (TableDescriptor) htd);
+  }
+
+  /**
+   * Create a region with its own WAL. Be sure to call
+   * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} to clean up all 
resources.
+   */
+  public static HRegion createRegionAndWAL(final HRegionInfo info, final Path 
rootDir,
+      final Configuration conf, final TableDescriptor htd) throws IOException {
     return createRegionAndWAL(info, rootDir, conf, htd, true);
   }
 
   /**
    * Create a region with it's own WAL. Be sure to call
    * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} to clean up all 
resources.
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createRegionAndWAL(HRegionInfo, Path, 
Configuration, TableDescriptor, boolean)}
    */
+  @Deprecated
   public static HRegion createRegionAndWAL(final HRegionInfo info, final Path 
rootDir,
       final Configuration conf, final HTableDescriptor htd, boolean initialize)
       throws IOException {
+    return createRegionAndWAL(info, rootDir, conf, (TableDescriptor) htd, 
initialize);
+  }
+
+  /**
+   * Create a region with its own WAL. Be sure to call
+   * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} to clean up all 
resources.
+   */
+  public static HRegion createRegionAndWAL(final HRegionInfo info, final Path 
rootDir,
+      final Configuration conf, final TableDescriptor htd, boolean initialize)
+      throws IOException {
     ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 
0, null);
     WAL wal = createWal(conf, rootDir, info);
     return HRegion.createHRegion(info, rootDir, conf, htd, wal, initialize);
@@ -2738,7 +2888,7 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
    * [2] https://issues.apache.org/jira/browse/ZOOKEEPER-1105
    *
    * @param nodeZK - the ZK watcher to expire
-   * @param checkStatus - true to check if we can create an HTable with the
+   * @param checkStatus - true to check if we can create a Table with the
    *                    current configuration.
    */
   public void expireSession(ZooKeeperWatcher nodeZK, boolean checkStatus)
@@ -3435,7 +3585,7 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
                                                 Get get) throws IOException {
     Scan scan = new Scan(get);
     InternalScanner scanner = (InternalScanner) store.getScanner(scan,
-        scan.getFamilyMap().get(store.getFamily().getName()),
+        scan.getFamilyMap().get(store.getColumnFamilyDescriptor().getName()),
         // originally MultiVersionConcurrencyControl.resetThreadReadPoint() 
was called to set
         // readpoint 0.
         0);
@@ -3480,7 +3630,7 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
                                                 ) throws IOException {
     Get get = new Get(row);
     Map<byte[], NavigableSet<byte[]>> s = get.getFamilyMap();
-    s.put(store.getFamily().getName(), columns);
+    s.put(store.getColumnFamilyDescriptor().getName(), columns);
 
     return getFromStoreFile(store,get);
   }
@@ -3802,9 +3952,24 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
    * Creates a pre-split table for load testing. If the table already exists,
    * logs a warning and continues.
    * @return the number of regions the table was split into
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createPreSplitLoadTestTable(Configuration,
+   *                 TableDescriptor, ColumnFamilyDescriptor)}
    */
+  @Deprecated
   public static int createPreSplitLoadTestTable(Configuration conf,
       HTableDescriptor desc, HColumnDescriptor hcd) throws IOException {
+    return createPreSplitLoadTestTable(conf, (TableDescriptor) desc,
+            (ColumnFamilyDescriptor) hcd);
+  }
+
+  /**
+   * Creates a pre-split table for load testing. If the table already exists,
+   * logs a warning and continues.
+   * @return the number of regions the table was split into
+   */
+  public static int createPreSplitLoadTestTable(Configuration conf,
+      TableDescriptor desc, ColumnFamilyDescriptor hcd) throws IOException {
     return createPreSplitLoadTestTable(conf, desc, hcd, 
DEFAULT_REGIONS_PER_SERVER);
   }
 
@@ -3812,10 +3977,25 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
    * Creates a pre-split table for load testing. If the table already exists,
    * logs a warning and continues.
    * @return the number of regions the table was split into
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createPreSplitLoadTestTable(Configuration,
+   *                 TableDescriptor, ColumnFamilyDescriptor, int)}
    */
+  @Deprecated
   public static int createPreSplitLoadTestTable(Configuration conf,
       HTableDescriptor desc, HColumnDescriptor hcd, int numRegionsPerServer) 
throws IOException {
-    return createPreSplitLoadTestTable(conf, desc, new HColumnDescriptor[] 
{hcd},
+    return createPreSplitLoadTestTable(conf, (TableDescriptor) desc,
+            (ColumnFamilyDescriptor) hcd, numRegionsPerServer);
+  }
+
+  /**
+   * Creates a pre-split table for load testing. If the table already exists,
+   * logs a warning and continues.
+   * @return the number of regions the table was split into
+   */
+  public static int createPreSplitLoadTestTable(Configuration conf,
+      TableDescriptor desc, ColumnFamilyDescriptor hcd, int 
numRegionsPerServer) throws IOException {
+    return createPreSplitLoadTestTable(conf, desc, new 
ColumnFamilyDescriptor[] {hcd},
         numRegionsPerServer);
   }
 
@@ -3823,10 +4003,26 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
    * Creates a pre-split table for load testing. If the table already exists,
    * logs a warning and continues.
    * @return the number of regions the table was split into
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createPreSplitLoadTestTable(Configuration,
+   *                 TableDescriptor, ColumnFamilyDescriptor[], int)}
    */
+  @Deprecated
   public static int createPreSplitLoadTestTable(Configuration conf,
-      HTableDescriptor desc, HColumnDescriptor[] hcds, int 
numRegionsPerServer) throws IOException {
+      HTableDescriptor desc, HColumnDescriptor[] hcds,
+      int numRegionsPerServer) throws IOException {
+    return createPreSplitLoadTestTable(conf, (TableDescriptor) desc,
+            (ColumnFamilyDescriptor[]) hcds, numRegionsPerServer);
+  }
 
+  /**
+   * Creates a pre-split table for load testing. If the table already exists,
+   * logs a warning and continues.
+   * @return the number of regions the table was split into
+   */
+  public static int createPreSplitLoadTestTable(Configuration conf,
+      TableDescriptor desc, ColumnFamilyDescriptor[] hcds,
+      int numRegionsPerServer) throws IOException {
     return createPreSplitLoadTestTable(conf, desc, hcds,
       new RegionSplitter.HexStringSplit(), numRegionsPerServer);
   }
@@ -3837,13 +4033,15 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
    * @return the number of regions the table was split into
    */
   public static int createPreSplitLoadTestTable(Configuration conf,
-      HTableDescriptor desc, HColumnDescriptor[] hcds,
+      TableDescriptor td, ColumnFamilyDescriptor[] cds,
       SplitAlgorithm splitter, int numRegionsPerServer) throws IOException {
-    for (HColumnDescriptor hcd : hcds) {
-      if (!desc.hasFamily(hcd.getName())) {
-        desc.addFamily(hcd);
+    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(td);
+    for (ColumnFamilyDescriptor cd : cds) {
+      if (!td.hasColumnFamily(cd.getName())) {
+        builder.addColumnFamily(cd);
       }
     }
+    td = builder.build();
     int totalNumberOfRegions = 0;
     Connection unmanagedConnection = ConnectionFactory.createConnection(conf);
     Admin admin = unmanagedConnection.getAdmin();
@@ -3865,12 +4063,12 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
       byte[][] splits = splitter.split(
           totalNumberOfRegions);
 
-      admin.createTable(desc, splits);
+      admin.createTable(td, splits);
     } catch (MasterNotRunningException e) {
       LOG.error("Master not running", e);
       throw new IOException(e);
     } catch (TableExistsException e) {
-      LOG.warn("Table " + desc.getTableName() +
+      LOG.warn("Table " + td.getTableName() +
           " already exists, continuing");
     } finally {
       admin.close();
@@ -3940,13 +4138,25 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
       + " on server " + server);
   }
 
-  public HRegion createTestRegion(String tableName, HColumnDescriptor hcd)
+  /**
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createTestRegion(String, ColumnFamilyDescriptor)}
+   */
+  @Deprecated
+  public HRegion createTestRegion(String tableName, HColumnDescriptor cd)
+      throws IOException {
+    return createTestRegion(tableName, (ColumnFamilyDescriptor) cd);
+  }
+
+  public HRegion createTestRegion(String tableName, ColumnFamilyDescriptor cd)
       throws IOException {
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
-    htd.addFamily(hcd);
+    TableDescriptor td
+        = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
+            .addColumnFamily(cd)
+            .build();
     HRegionInfo info =
         new HRegionInfo(TableName.valueOf(tableName), null, null, false);
-    return createRegionAndWAL(info, getDataTestDir(), getConfiguration(), htd);
+    return createRegionAndWAL(info, getDataTestDir(), getConfiguration(), td);
   }
 
   public void setFileSystemURI(String fsURI) {
@@ -4049,13 +4259,13 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
         boolean tableAvailable = getAdmin().isTableAvailable(tableName);
         if (tableAvailable) {
           try (Table table = getConnection().getTable(tableName)) {
-            HTableDescriptor htd = table.getTableDescriptor();
+            TableDescriptor htd = table.getDescriptor();
             for (HRegionLocation loc : 
getConnection().getRegionLocator(tableName)
                 .getAllRegionLocations()) {
               Scan scan = new 
Scan().withStartRow(loc.getRegionInfo().getStartKey())
                   
.withStopRow(loc.getRegionInfo().getEndKey()).setOneRowLimit()
                   .setMaxResultsPerColumnFamily(1).setCacheBlocks(false);
-              for (byte[] family : htd.getFamiliesKeys()) {
+              for (byte[] family : htd.getColumnFamilyNames()) {
                 scan.addFamily(family);
               }
               try (ResultScanner scanner = table.getScanner(scan)) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
index 1044a18..2914e4b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
@@ -629,7 +629,7 @@ public class MiniHBaseCluster extends HBaseCluster {
     for (JVMClusterUtil.RegionServerThread t:
         this.hbaseCluster.getRegionServers()) {
       for(Region r: t.getRegionServer().getOnlineRegionsLocalContext()) {
-        if(r.getTableDesc().getTableName().equals(tableName)) {
+        if(r.getTableDescriptor().getTableName().equals(tableName)) {
           r.flush(true);
         }
       }
@@ -657,7 +657,7 @@ public class MiniHBaseCluster extends HBaseCluster {
     for (JVMClusterUtil.RegionServerThread t:
         this.hbaseCluster.getRegionServers()) {
       for(Region r: t.getRegionServer().getOnlineRegionsLocalContext()) {
-        if(r.getTableDesc().getTableName().equals(tableName)) {
+        if(r.getTableDescriptor().getTableName().equals(tableName)) {
           r.compact(major);
         }
       }
@@ -703,7 +703,7 @@ public class MiniHBaseCluster extends HBaseCluster {
     for (JVMClusterUtil.RegionServerThread rst : getRegionServerThreads()) {
       HRegionServer hrs = rst.getRegionServer();
       for (Region region : hrs.getOnlineRegionsLocalContext()) {
-        if (region.getTableDesc().getTableName().equals(tableName)) {
+        if (region.getTableDescriptor().getTableName().equals(tableName)) {
           ret.add((HRegion)region);
         }
       }
@@ -801,7 +801,7 @@ public class MiniHBaseCluster extends HBaseCluster {
     for (JVMClusterUtil.RegionServerThread rst : getRegionServerThreads()) {
       HRegionServer hrs = rst.getRegionServer();
       for (Region region : hrs.getOnlineRegions(tableName)) {
-        if (region.getTableDesc().getTableName().equals(tableName)) {
+        if (region.getTableDescriptor().getTableName().equals(tableName)) {
           ret.add((HRegion)region);
         }
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
index 7640138..35d7eb1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
@@ -33,7 +33,9 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -98,7 +100,7 @@ public class TestIOFencing {
     @SuppressWarnings("deprecation")
     public CompactionBlockerRegion(Path tableDir, WAL log,
         FileSystem fs, Configuration confParam, HRegionInfo info,
-        HTableDescriptor htd, RegionServerServices rsServices) {
+        TableDescriptor htd, RegionServerServices rsServices) {
       super(tableDir, log, fs, confParam, info, htd, rsServices);
     }
 
@@ -158,7 +160,7 @@ public class TestIOFencing {
 
     public BlockCompactionsInPrepRegion(Path tableDir, WAL log,
         FileSystem fs, Configuration confParam, HRegionInfo info,
-        HTableDescriptor htd, RegionServerServices rsServices) {
+        TableDescriptor htd, RegionServerServices rsServices) {
       super(tableDir, log, fs, confParam, info, htd, rsServices);
     }
     @Override
@@ -181,18 +183,18 @@ public class TestIOFencing {
   public static class BlockCompactionsInCompletionRegion extends 
CompactionBlockerRegion {
     public BlockCompactionsInCompletionRegion(Path tableDir, WAL log,
         FileSystem fs, Configuration confParam, HRegionInfo info,
-        HTableDescriptor htd, RegionServerServices rsServices) {
+        TableDescriptor htd, RegionServerServices rsServices) {
       super(tableDir, log, fs, confParam, info, htd, rsServices);
     }
     @Override
-    protected HStore instantiateHStore(final HColumnDescriptor family) throws 
IOException {
+    protected HStore instantiateHStore(final ColumnFamilyDescriptor family) 
throws IOException {
       return new BlockCompactionsInCompletionHStore(this, family, this.conf);
     }
   }
 
   public static class BlockCompactionsInCompletionHStore extends HStore {
     CompactionBlockerRegion r;
-    protected BlockCompactionsInCompletionHStore(HRegion region, 
HColumnDescriptor family,
+    protected BlockCompactionsInCompletionHStore(HRegion region, 
ColumnFamilyDescriptor family,
         Configuration confParam) throws IOException {
       super(region, family, confParam);
       r = (CompactionBlockerRegion) region;

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
index 1acb842..9ed63f7 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
@@ -256,7 +256,7 @@ public class TestHFileArchiving {
     clearArchiveDirectory();
 
     // then get the current store files
-    byte[][]columns = region.getTableDesc().getFamiliesKeys().toArray(new 
byte[0][]);
+    byte[][]columns = 
region.getTableDescriptor().getColumnFamilyNames().toArray(new byte[0][]);
     List<String> storeFiles = region.getStoreFileList(columns);
 
     // then delete the table so the hfiles get archived
@@ -335,7 +335,7 @@ public class TestHFileArchiving {
     clearArchiveDirectory();
 
     // then get the current store files
-    byte[][]columns = region.getTableDesc().getFamiliesKeys().toArray(new 
byte[0][]);
+    byte[][]columns = 
region.getTableDescriptor().getColumnFamilyNames().toArray(new byte[0][]);
     List<String> storeFiles = region.getStoreFileList(columns);
 
     // then delete the table so the hfiles get archived

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java
index 77bed43..48deabb 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java
@@ -181,7 +181,7 @@ public class TestIncrementalBackup extends TestBackupBase {
       false, tablesRestoreIncMultiple, tablesMapIncMultiple, true));
 
     hTable = (HTable) conn.getTable(table1_restore);
-    LOG.debug("After incremental restore: " + hTable.getTableDescriptor());
+    LOG.debug("After incremental restore: " + hTable.getDescriptor());
     LOG.debug("f1 has " + TEST_UTIL.countRows(hTable, famName) + " rows");
     Assert.assertEquals(TEST_UTIL.countRows(hTable, famName), NB_ROWS_IN_BATCH 
+ ADD_ROWS);
     LOG.debug("f2 has " + TEST_UTIL.countRows(hTable, fam2Name) + " rows");

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
index c5681b1..8c7f87f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
@@ -420,8 +420,8 @@ public class TestAdmin1 {
     htd.addFamily(fam3);
     this.admin.createTable(htd);
     Table table = TEST_UTIL.getConnection().getTable(htd.getTableName());
-    HTableDescriptor confirmedHtd = table.getTableDescriptor();
-    assertEquals(htd.compareTo(confirmedHtd), 0);
+    TableDescriptor confirmedHtd = table.getDescriptor();
+    assertEquals(TableDescriptor.COMPARATOR.compare(htd, confirmedHtd), 0);
     MetaTableAccessor.fullScanMetaAndPrint(TEST_UTIL.getConnection());
     table.close();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java
index e7d7f0a..62c8e7b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java
@@ -270,7 +270,7 @@ public class TestAvoidCellReferencesIntoShippedBlocks {
         List<? extends KeyValueScanner> scanners, ScanType scanType, long 
earliestPutTs)
         throws IOException {
       Scan scan = new Scan();
-      scan.setMaxVersions(store.getFamily().getMaxVersions());
+      scan.setMaxVersions(store.getColumnFamilyDescriptor().getMaxVersions());
       return new CompactorStoreScanner(store, store.getScanInfo(), scan, 
scanners, scanType,
           store.getSmallestReadPoint(), earliestPutTs);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
index ffeb617..7a32e6a 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
@@ -260,7 +260,7 @@ public class TestMetaCache {
                                   HBaseProtos.RegionSpecifier regionSpec) 
throws ServiceException {
       try {
         return TABLE_NAME.equals(
-            rpcServices.getRegion(regionSpec).getTableDesc().getTableName());
+            
rpcServices.getRegion(regionSpec).getTableDescriptor().getTableName());
       } catch (IOException ioe) {
         throw new ServiceException(ioe);
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
index 148da52..437afaf 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
@@ -302,11 +302,13 @@ public class TestReplicaWithCluster {
 
   @Test (timeout=120000)
   public void testChangeTable() throws Exception {
-    HTableDescriptor hdt = HTU.createTableDescriptor("testChangeTable");
-    hdt.setRegionReplication(NB_SERVERS);
-    hdt.addCoprocessor(SlowMeCopro.class.getName());
-    Table table = HTU.createTable(hdt, new byte[][]{f}, null);
-
+    TableDescriptor td = 
TableDescriptorBuilder.newBuilder(TableName.valueOf("testChangeTable"))
+            .setRegionReplication(NB_SERVERS)
+            .addCoprocessor(SlowMeCopro.class.getName())
+            
.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(f).build())
+            .build();
+    HTU.getAdmin().createTable(td);
+    Table table = HTU.getConnection().getTable(td.getTableName());
     // basic test: it should work.
     Put p = new Put(row);
     p.addColumn(f, row, row);
@@ -317,13 +319,14 @@ public class TestReplicaWithCluster {
     Assert.assertFalse(r.isStale());
 
     // Add a CF, it should work.
-    HTableDescriptor bHdt = 
HTU.getAdmin().getTableDescriptor(hdt.getTableName());
-    HColumnDescriptor hcd = new HColumnDescriptor(row);
-    hdt.addFamily(hcd);
-    HTU.getAdmin().disableTable(hdt.getTableName());
-    HTU.getAdmin().modifyTable(hdt.getTableName(), hdt);
-    HTU.getAdmin().enableTable(hdt.getTableName());
-    HTableDescriptor nHdt = 
HTU.getAdmin().getTableDescriptor(hdt.getTableName());
+    TableDescriptor bHdt = 
HTU.getAdmin().listTableDescriptor(td.getTableName());
+    td = TableDescriptorBuilder.newBuilder(td)
+            
.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(row).build())
+            .build();
+    HTU.getAdmin().disableTable(td.getTableName());
+    HTU.getAdmin().modifyTable(td);
+    HTU.getAdmin().enableTable(td.getTableName());
+    TableDescriptor nHdt = 
HTU.getAdmin().listTableDescriptor(td.getTableName());
     Assert.assertEquals("fams=" + Arrays.toString(nHdt.getColumnFamilies()),
         bHdt.getColumnFamilyCount() + 1, nHdt.getColumnFamilyCount());
 
@@ -347,12 +350,12 @@ public class TestReplicaWithCluster {
     }
 
     Admin admin = HTU.getAdmin();
-    nHdt =admin.getTableDescriptor(hdt.getTableName());
+    nHdt =admin.listTableDescriptor(td.getTableName());
     Assert.assertEquals("fams=" + Arrays.toString(nHdt.getColumnFamilies()),
         bHdt.getColumnFamilyCount() + 1, nHdt.getColumnFamilyCount());
 
-    admin.disableTable(hdt.getTableName());
-    admin.deleteTable(hdt.getTableName());
+    admin.disableTable(td.getTableName());
+    admin.deleteTable(td.getTableName());
     admin.close();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
index c508b02..d65e85b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
@@ -348,7 +348,7 @@ public class SimpleRegionObserver implements RegionObserver 
{
     assertNotNull(e.getRegion());
     assertNotNull(get);
     assertNotNull(results);
-    if (e.getRegion().getTableDesc().getTableName().equals(
+    if (e.getRegion().getTableDescriptor().getTableName().equals(
         TestRegionObserverInterface.TEST_TABLE)) {
       boolean foundA = false;
       boolean foundB = false;
@@ -380,7 +380,7 @@ public class SimpleRegionObserver implements RegionObserver 
{
     assertNotNull(e);
     assertNotNull(e.getRegion());
     assertNotNull(familyMap);
-    if (e.getRegion().getTableDesc().getTableName().equals(
+    if (e.getRegion().getTableDescriptor().getTableName().equals(
         TestRegionObserverInterface.TEST_TABLE)) {
       List<Cell> cells = familyMap.get(TestRegionObserverInterface.A);
       assertNotNull(cells);
@@ -417,7 +417,7 @@ public class SimpleRegionObserver implements RegionObserver 
{
     assertNotNull(e.getRegion());
     assertNotNull(familyMap);
     List<Cell> cells = familyMap.get(TestRegionObserverInterface.A);
-    if (e.getRegion().getTableDesc().getTableName().equals(
+    if (e.getRegion().getTableDescriptor().getTableName().equals(
         TestRegionObserverInterface.TEST_TABLE)) {
       assertNotNull(cells);
       assertNotNull(cells.get(0));
@@ -615,7 +615,7 @@ public class SimpleRegionObserver implements RegionObserver 
{
     RegionCoprocessorEnvironment e = ctx.getEnvironment();
     assertNotNull(e);
     assertNotNull(e.getRegion());
-    if (e.getRegion().getTableDesc().getTableName().equals(
+    if (e.getRegion().getTableDescriptor().getTableName().equals(
         TestRegionObserverInterface.TEST_TABLE)) {
       assertNotNull(familyPaths);
       assertEquals(1,familyPaths.size());
@@ -634,7 +634,7 @@ public class SimpleRegionObserver implements RegionObserver 
{
     RegionCoprocessorEnvironment e = ctx.getEnvironment();
     assertNotNull(e);
     assertNotNull(e.getRegion());
-    if (e.getRegion().getTableDesc().getTableName().equals(
+    if (e.getRegion().getTableDescriptor().getTableName().equals(
         TestRegionObserverInterface.TEST_TABLE)) {
       assertNotNull(familyPaths);
       assertEquals(1,familyPaths.size());

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java
index fb2f20c..1102cf8 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java
@@ -99,7 +99,7 @@ public class TestCoprocessorConfiguration {
     Configuration conf = new Configuration(CONF);
     HRegion region = mock(HRegion.class);
     when(region.getRegionInfo()).thenReturn(REGIONINFO);
-    when(region.getTableDesc()).thenReturn(TABLEDESC);
+    when(region.getTableDescriptor()).thenReturn(TABLEDESC);
     RegionServerServices rsServices = mock(RegionServerServices.class);
     systemCoprocessorLoaded.set(false);
     tableCoprocessorLoaded.set(false);
@@ -141,7 +141,7 @@ public class TestCoprocessorConfiguration {
     conf.setBoolean(CoprocessorHost.COPROCESSORS_ENABLED_CONF_KEY, false);
     HRegion region = mock(HRegion.class);
     when(region.getRegionInfo()).thenReturn(REGIONINFO);
-    when(region.getTableDesc()).thenReturn(TABLEDESC);
+    when(region.getTableDescriptor()).thenReturn(TABLEDESC);
     RegionServerServices rsServices = mock(RegionServerServices.class);
     systemCoprocessorLoaded.set(false);
     tableCoprocessorLoaded.set(false);
@@ -159,7 +159,7 @@ public class TestCoprocessorConfiguration {
     conf.setBoolean(CoprocessorHost.USER_COPROCESSORS_ENABLED_CONF_KEY, false);
     HRegion region = mock(HRegion.class);
     when(region.getRegionInfo()).thenReturn(REGIONINFO);
-    when(region.getTableDesc()).thenReturn(TABLEDESC);
+    when(region.getTableDescriptor()).thenReturn(TABLEDESC);
     RegionServerServices rsServices = mock(RegionServerServices.class);
     systemCoprocessorLoaded.set(false);
     tableCoprocessorLoaded.set(false);

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
index b99087d..26cfed7 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
@@ -23,7 +23,6 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 
 import java.io.IOException;
-import java.util.Collections;
 import java.util.List;
 import java.util.NavigableSet;
 import java.util.concurrent.CountDownLatch;
@@ -46,6 +45,7 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.filter.FilterBase;
 import org.apache.hadoop.hbase.regionserver.ChunkCreator;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -228,7 +228,7 @@ public class TestRegionObserverScannerOpenHook {
     @SuppressWarnings("deprecation")
     public CompactionCompletionNotifyingRegion(Path tableDir, WAL log,
         FileSystem fs, Configuration confParam, HRegionInfo info,
-        HTableDescriptor htd, RegionServerServices rsServices) {
+        TableDescriptor htd, RegionServerServices rsServices) {
       super(tableDir, log, fs, confParam, info, htd, rsServices);
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
index 67add2f..b3859ff 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
@@ -52,6 +52,7 @@ import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper;
 import org.apache.hadoop.hbase.favored.FavoredNodeLoadBalancer;
 import org.apache.hadoop.hbase.favored.FavoredNodesPlan;
@@ -424,13 +425,12 @@ public class TestRegionPlacement {
         // All regions are supposed to have favored nodes,
         // except for hbase:meta and ROOT
         if (favoredServerList == null) {
-          HTableDescriptor desc = region.getTableDesc();
+          TableDescriptor desc = region.getTableDescriptor();
           // Verify they are ROOT and hbase:meta regions since no favored nodes
           assertNull(favoredSocketAddress);
           assertTrue("User region " +
-              region.getTableDesc().getTableName() +
-              " should have favored nodes",
-              (desc.isRootRegion() || desc.isMetaRegion()));
+              region.getTableDescriptor().getTableName() +
+              " should have favored nodes", desc.isMetaRegion());
         } else {
           // For user region, the favored nodes in the region server should be
           // identical to favored nodes in the assignmentPlan

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
index 8f88af7..d3f8295 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
@@ -69,6 +69,9 @@ import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
 
 import com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 
 /**
  * Test the master-related aspects of a snapshot
@@ -280,9 +283,11 @@ public class TestSnapshotFromMaster {
     // recreate test table with disabled compactions; otherwise compaction may 
happen before
     // snapshot, the call after snapshot will be a no-op and checks will fail
     UTIL.deleteTable(TABLE_NAME);
-    HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
-    htd.setCompactionEnabled(false);
-    UTIL.createTable(htd, new byte[][] { TEST_FAM }, null);
+    TableDescriptor td = TableDescriptorBuilder.newBuilder(TABLE_NAME)
+            
.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(TEST_FAM).build())
+            .setCompactionEnabled(false)
+            .build();
+    UTIL.getAdmin().createTable(td);
 
     // load the table
     for (int i = 0; i < blockingStoreFiles / 2; i ++) {
@@ -292,7 +297,6 @@ public class TestSnapshotFromMaster {
 
     // disable the table so we can take a snapshot
     admin.disableTable(TABLE_NAME);
-    htd.setCompactionEnabled(true);
 
     // take a snapshot of the table
     String snapshotName = "snapshot";
@@ -305,8 +309,11 @@ public class TestSnapshotFromMaster {
     // ensure we only have one snapshot
     SnapshotTestingUtils.assertOneSnapshotThatMatches(admin, 
snapshotNameBytes, TABLE_NAME);
 
+    td = TableDescriptorBuilder.newBuilder(td)
+            .setCompactionEnabled(true)
+            .build();
     // enable compactions now
-    admin.modifyTable(TABLE_NAME, htd);
+    admin.modifyTable(td);
 
     // renable the table so we can compact the regions
     admin.enableTable(TABLE_NAME);

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
index c47ed68..b090cdd 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
@@ -45,7 +45,7 @@ public class NoOpScanPolicyObserver implements RegionObserver 
{
   public InternalScanner preFlushScannerOpen(final 
ObserverContext<RegionCoprocessorEnvironment> c,
       Store store, List<KeyValueScanner> scanners, InternalScanner s) throws 
IOException {
     ScanInfo oldSI = store.getScanInfo();
-    ScanInfo scanInfo = new ScanInfo(oldSI.getConfiguration(), 
store.getFamily(), oldSI.getTtl(),
+    ScanInfo scanInfo = new ScanInfo(oldSI.getConfiguration(), 
store.getColumnFamilyDescriptor(), oldSI.getTtl(),
         oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
     Scan scan = new Scan();
     scan.setMaxVersions(oldSI.getMaxVersions());
@@ -62,7 +62,7 @@ public class NoOpScanPolicyObserver implements RegionObserver 
{
       InternalScanner s) throws IOException {
     // this demonstrates how to override the scanners default behavior
     ScanInfo oldSI = store.getScanInfo();
-    ScanInfo scanInfo = new ScanInfo(oldSI.getConfiguration(), 
store.getFamily(), oldSI.getTtl(),
+    ScanInfo scanInfo = new ScanInfo(oldSI.getConfiguration(), 
store.getColumnFamilyDescriptor(), oldSI.getTtl(),
         oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
     Scan scan = new Scan();
     scan.setMaxVersions(oldSI.getMaxVersions());

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
index 9b96ff2..6a6fdb0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;
 import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
@@ -73,7 +74,7 @@ public class RegionAsTable implements Table {
 
   @Override
   public TableName getName() {
-    return this.region.getTableDesc().getTableName();
+    return this.region.getTableDescriptor().getTableName();
   }
 
   @Override
@@ -83,7 +84,12 @@ public class RegionAsTable implements Table {
 
   @Override
   public HTableDescriptor getTableDescriptor() throws IOException {
-    return this.region.getTableDesc();
+    return new HTableDescriptor(this.region.getTableDescriptor());
+  }
+
+  @Override
+  public TableDescriptor getDescriptor() throws IOException {
+    return this.region.getTableDescriptor();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
index ef3ce06..1cef625 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.io.HeapSize;
@@ -660,7 +661,7 @@ public class TestAtomicOperation {
   public static class MockHRegion extends HRegion {
 
     public MockHRegion(Path tableDir, WAL log, FileSystem fs, Configuration 
conf,
-        final HRegionInfo regionInfo, final HTableDescriptor htd, 
RegionServerServices rsServices) {
+        final HRegionInfo regionInfo, final TableDescriptor htd, 
RegionServerServices rsServices) {
       super(tableDir, log, fs, conf, regionInfo, htd, rsServices);
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index 384608c..26172f5 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
@@ -291,7 +291,7 @@ public class TestCompoundBloomFilter {
     Store store = mock(Store.class);
     HColumnDescriptor hcd = mock(HColumnDescriptor.class);
     
when(hcd.getName()).thenReturn(Bytes.toBytes(RandomKeyValueUtil.COLUMN_FAMILY_NAME));
-    when(store.getFamily()).thenReturn(hcd);
+    when(store.getColumnFamilyDescriptor()).thenReturn(hcd);
     return scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
index 2eb88f4..2cea121 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
@@ -219,7 +219,7 @@ public class TestHMobStore {
 
     Scan scan = new Scan(get);
     InternalScanner scanner = (InternalScanner) store.getScanner(scan,
-        scan.getFamilyMap().get(store.getFamily().getName()),
+        scan.getFamilyMap().get(store.getColumnFamilyDescriptor().getName()),
         0);
 
     List<Cell> results = new ArrayList<>();
@@ -264,7 +264,7 @@ public class TestHMobStore {
 
     Scan scan = new Scan(get);
     InternalScanner scanner = (InternalScanner) store.getScanner(scan,
-        scan.getFamilyMap().get(store.getFamily().getName()),
+        scan.getFamilyMap().get(store.getColumnFamilyDescriptor().getName()),
         0);
 
     List<Cell> results = new ArrayList<>();
@@ -309,7 +309,7 @@ public class TestHMobStore {
     Scan scan = new Scan(get);
     scan.setAttribute(MobConstants.MOB_SCAN_RAW, Bytes.toBytes(Boolean.TRUE));
     InternalScanner scanner = (InternalScanner) store.getScanner(scan,
-      scan.getFamilyMap().get(store.getFamily().getName()),
+      scan.getFamilyMap().get(store.getColumnFamilyDescriptor().getName()),
       0);
 
     List<Cell> results = new ArrayList<>();
@@ -354,7 +354,7 @@ public class TestHMobStore {
 
     Scan scan = new Scan(get);
     InternalScanner scanner = (InternalScanner) store.getScanner(scan,
-        scan.getFamilyMap().get(store.getFamily().getName()),
+        scan.getFamilyMap().get(store.getColumnFamilyDescriptor().getName()),
         0);
 
     List<Cell> results = new ArrayList<>();
@@ -406,7 +406,7 @@ public class TestHMobStore {
     Scan scan = new Scan(get);
     scan.setAttribute(MobConstants.MOB_SCAN_RAW, Bytes.toBytes(Boolean.TRUE));
     InternalScanner scanner = (InternalScanner) store.getScanner(scan,
-      scan.getFamilyMap().get(store.getFamily().getName()),
+      scan.getFamilyMap().get(store.getColumnFamilyDescriptor().getName()),
       0);
 
     List<Cell> results = new ArrayList<>();
@@ -421,7 +421,7 @@ public class TestHMobStore {
       //this is not mob reference cell.
       Assert.assertFalse(MobUtils.isMobReferenceCell(cell));
       Assert.assertEquals(expected.get(i), results.get(i));
-      Assert.assertEquals(100, store.getFamily().getMobThreshold());
+      Assert.assertEquals(100, 
store.getColumnFamilyDescriptor().getMobThreshold());
     }
   }
 
@@ -522,7 +522,7 @@ public class TestHMobStore {
     // Scan the values
     Scan scan = new Scan(get);
     InternalScanner scanner = (InternalScanner) store.getScanner(scan,
-        scan.getFamilyMap().get(store.getFamily().getName()),
+        scan.getFamilyMap().get(store.getColumnFamilyDescriptor().getName()),
         0);
 
     List<Cell> results = new ArrayList<>();

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 4f46c88..7c4e329 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -111,6 +111,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
@@ -812,7 +813,7 @@ public class TestHRegion {
       Path regiondir = region.getRegionFileSystem().getRegionDir();
       FileSystem fs = region.getRegionFileSystem().getFileSystem();
       byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes();
-      byte[][] columns = region.getTableDesc().getFamiliesKeys().toArray(new 
byte[0][]);
+      byte[][] columns = 
region.getTableDescriptor().getColumnFamilyNames().toArray(new byte[0][]);
 
       assertEquals(0, region.getStoreFileList(columns).size());
 
@@ -945,7 +946,7 @@ public class TestHRegion {
       writer.close();
 
       // close the region now, and reopen again
-      region.getTableDesc();
+      region.getTableDescriptor();
       region.getRegionInfo();
       region.close();
       try {
@@ -4157,7 +4158,7 @@ public class TestHRegion {
       // use the static method to compute the value, it should be the same.
       // static method is used by load balancer or other components
       HDFSBlocksDistribution blocksDistribution2 = 
HRegion.computeHDFSBlocksDistribution(
-          htu.getConfiguration(), firstRegion.getTableDesc(), 
firstRegion.getRegionInfo());
+          htu.getConfiguration(), firstRegion.getTableDescriptor(), 
firstRegion.getRegionInfo());
       long uniqueBlocksWeight2 = 
blocksDistribution2.getUniqueBlocksTotalWeight();
 
       assertTrue(uniqueBlocksWeight1 == uniqueBlocksWeight2);
@@ -5757,7 +5758,7 @@ public class TestHRegion {
   static class HRegionWithSeqId extends HRegion {
     public HRegionWithSeqId(final Path tableDir, final WAL wal, final 
FileSystem fs,
         final Configuration confParam, final HRegionInfo regionInfo,
-        final HTableDescriptor htd, final RegionServerServices rsServices) {
+        final TableDescriptor htd, final RegionServerServices rsServices) {
       super(tableDir, wal, fs, confParam, regionInfo, htd, rsServices);
     }
     @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
index 5467c3f..a1a70e8 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
@@ -1134,7 +1134,7 @@ public class TestHRegionReplayEvents {
     secondaryRegion.replayWALFlushStartMarker(FlushDescriptor.newBuilder().
       setFlushSequenceNumber(10)
       .setTableName(UnsafeByteOperations.unsafeWrap(
-          primaryRegion.getTableDesc().getTableName().getName()))
+          primaryRegion.getTableDescriptor().getTableName().getName()))
       .setAction(FlushAction.START_FLUSH)
       .setEncodedRegionName(
           
UnsafeByteOperations.unsafeWrap(primaryRegion.getRegionInfo().getEncodedNameAsBytes()))
@@ -1542,7 +1542,7 @@ public class TestHRegionReplayEvents {
     // from primary and also deleted from the archive directory
     secondaryRegion.replayWALFlushCommitMarker(FlushDescriptor.newBuilder().
       setFlushSequenceNumber(Long.MAX_VALUE)
-      
.setTableName(UnsafeByteOperations.unsafeWrap(primaryRegion.getTableDesc().getTableName().getName()))
+      
.setTableName(UnsafeByteOperations.unsafeWrap(primaryRegion.getTableDescriptor().getTableName().getName()))
       .setAction(FlushAction.COMMIT_FLUSH)
       .setEncodedRegionName(
           
UnsafeByteOperations.unsafeWrap(primaryRegion.getRegionInfo().getEncodedNameAsBytes()))
@@ -1562,7 +1562,7 @@ public class TestHRegionReplayEvents {
     // from primary and also deleted from the archive directory
     secondaryRegion.replayWALCompactionMarker(CompactionDescriptor.newBuilder()
       .setTableName(UnsafeByteOperations.unsafeWrap(
-          primaryRegion.getTableDesc().getTableName().getName()))
+          primaryRegion.getTableDescriptor().getTableName().getName()))
       .setEncodedRegionName(
           
UnsafeByteOperations.unsafeWrap(primaryRegion.getRegionInfo().getEncodedNameAsBytes()))
       .setFamilyName(UnsafeByteOperations.unsafeWrap(families[0]))
@@ -1580,7 +1580,7 @@ public class TestHRegionReplayEvents {
     // from primary and also deleted from the archive directory
     
secondaryRegion.replayWALRegionEventMarker(RegionEventDescriptor.newBuilder()
       .setTableName(UnsafeByteOperations.unsafeWrap(
-          primaryRegion.getTableDesc().getTableName().getName()))
+          primaryRegion.getTableDescriptor().getTableName().getName()))
       .setEncodedRegionName(
           
UnsafeByteOperations.unsafeWrap(primaryRegion.getRegionInfo().getEncodedNameAsBytes()))
       
.setRegionName(UnsafeByteOperations.unsafeWrap(primaryRegion.getRegionInfo().getRegionName()))
@@ -1600,7 +1600,7 @@ public class TestHRegionReplayEvents {
     // tests replaying bulk load event marker, but the bulk load files have 
already been compacted
     // from primary and also deleted from the archive directory
     
secondaryRegion.replayWALBulkLoadEventMarker(BulkLoadDescriptor.newBuilder()
-      
.setTableName(ProtobufUtil.toProtoTableName(primaryRegion.getTableDesc().getTableName()))
+      
.setTableName(ProtobufUtil.toProtoTableName(primaryRegion.getTableDescriptor().getTableName()))
       .setEncodedRegionName(
           
UnsafeByteOperations.unsafeWrap(primaryRegion.getRegionInfo().getEncodedNameAsBytes()))
       .setBulkloadSeqNum(Long.MAX_VALUE)

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
index 7070a80..6cbe23c 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
@@ -210,7 +210,7 @@ public class TestHStoreFile extends HBaseTestCase {
     HColumnDescriptor hcd = mock(HColumnDescriptor.class);
     byte[] cf = Bytes.toBytes("ty");
     when(hcd.getName()).thenReturn(cf);
-    when(store.getFamily()).thenReturn(hcd);
+    when(store.getColumnFamilyDescriptor()).thenReturn(hcd);
     StoreFileScanner scanner =
         new StoreFileScanner(reader, mock(HFileScanner.class), false, false, 
0, 0, true);
     Scan scan = new Scan();
@@ -530,7 +530,7 @@ public class TestHStoreFile extends HBaseTestCase {
       Store store = mock(Store.class);
       HColumnDescriptor hcd = mock(HColumnDescriptor.class);
       when(hcd.getName()).thenReturn(Bytes.toBytes("family"));
-      when(store.getFamily()).thenReturn(hcd);
+      when(store.getColumnFamilyDescriptor()).thenReturn(hcd);
       boolean exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
       if (i % 2 == 0) {
         if (!exists) falseNeg++;
@@ -717,7 +717,7 @@ public class TestHStoreFile extends HBaseTestCase {
       Store store = mock(Store.class);
       HColumnDescriptor hcd = mock(HColumnDescriptor.class);
       when(hcd.getName()).thenReturn(Bytes.toBytes("family"));
-      when(store.getFamily()).thenReturn(hcd);
+      when(store.getColumnFamilyDescriptor()).thenReturn(hcd);
       // check false positives rate
       int falsePos = 0;
       int falseNeg = 0;
@@ -861,7 +861,7 @@ public class TestHStoreFile extends HBaseTestCase {
     Store store = mock(Store.class);
     HColumnDescriptor hcd = mock(HColumnDescriptor.class);
     when(hcd.getName()).thenReturn(family);
-    when(store.getFamily()).thenReturn(hcd);
+    when(store.getColumnFamilyDescriptor()).thenReturn(hcd);
     hsf.initReader();
     StoreFileReader reader = hsf.getReader();
     StoreFileScanner scanner = getStoreFileScanner(reader, false, false);

Reply via email to