Repository: hbase
Updated Branches:
  refs/heads/branch-2 b24e33312 -> 205016ca7


http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 18b1114..28d2a24 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -71,6 +71,7 @@ import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.ImmutableHTableDescriptor;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
@@ -466,10 +467,20 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
 
   /**
    * @return META table descriptor
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #getMetaDescriptor()}
    */
+  @Deprecated
   public HTableDescriptor getMetaTableDescriptor() {
+    return new 
ImmutableHTableDescriptor(getMetaTableDescriptorBuilder().build());
+  }
+
+  /**
+   * @return META table descriptor
+   */
+  public TableDescriptorBuilder getMetaTableDescriptorBuilder() {
     try {
-      return new FSTableDescriptors(conf).get(TableName.META_TABLE_NAME);
+      return FSTableDescriptors.createMetaTableDescriptorBuilder(conf);
     } catch (IOException e) {
       throw new RuntimeException("Unable to create META table descriptor", e);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
index 7457f43..95997f2 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
@@ -25,10 +25,13 @@ import java.io.IOException;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.FSTableDescriptors;
-import org.junit.*;
+import org.junit.Rule;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
 
@@ -46,9 +49,9 @@ public class TestFSTableDescriptorForceCreation {
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
     FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), 
fs, rootdir);
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
 
-    assertTrue("Should create new table descriptor", 
fstd.createTableDescriptor(htd, false));
+    assertTrue("Should create new table descriptor",
+      
fstd.createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build(),
 false));
   }
 
   @Test
@@ -59,7 +62,7 @@ public class TestFSTableDescriptorForceCreation {
     // Cleanup old tests if any detritus laying around.
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
     FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), 
fs, rootdir);
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
+    TableDescriptor htd = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build();
     fstd.add(htd);
     assertFalse("Should not create new table descriptor", 
fstd.createTableDescriptor(htd, false));
   }
@@ -71,7 +74,7 @@ public class TestFSTableDescriptorForceCreation {
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
     FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), 
fs, rootdir);
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
+    TableDescriptor htd = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build();
     fstd.createTableDescriptor(htd, false);
     assertTrue("Should create new table descriptor",
         fstd.createTableDescriptor(htd, true));

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
index 20cf8bb..d85326f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
@@ -24,6 +24,8 @@ import java.io.IOException;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.master.MasterFileSystem;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -152,22 +154,22 @@ public class TestHColumnDescriptorDefaultVersions {
     Admin admin = TEST_UTIL.getAdmin();
 
     // Verify descriptor from master
-    HTableDescriptor htd = admin.getTableDescriptor(tableName);
-    HColumnDescriptor[] hcds = htd.getColumnFamilies();
+    TableDescriptor htd = admin.listTableDescriptor(tableName);
+    ColumnFamilyDescriptor[] hcds = htd.getColumnFamilies();
     verifyHColumnDescriptor(expected, hcds, tableName, families);
 
     // Verify descriptor from HDFS
     MasterFileSystem mfs = 
TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterFileSystem();
     Path tableDir = FSUtils.getTableDir(mfs.getRootDir(), tableName);
-    HTableDescriptor td = 
FSTableDescriptors.getTableDescriptorFromFs(mfs.getFileSystem(), tableDir);
+    TableDescriptor td = 
FSTableDescriptors.getTableDescriptorFromFs(mfs.getFileSystem(), tableDir);
     hcds = td.getColumnFamilies();
     verifyHColumnDescriptor(expected, hcds, tableName, families);
   }
 
-  private void verifyHColumnDescriptor(int expected, final HColumnDescriptor[] 
hcds,
+  private void verifyHColumnDescriptor(int expected, final 
ColumnFamilyDescriptor[] hcds,
       final TableName tableName,
       final byte[]... families) {
-    for (HColumnDescriptor hcd : hcds) {
+    for (ColumnFamilyDescriptor hcd : hcds) {
       assertEquals(expected, hcd.getMaxVersions());
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
index d17c782..121647e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
@@ -38,14 +38,11 @@ import java.util.regex.Pattern;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableNotEnabledException;
 import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor;
 import org.apache.hadoop.hbase.master.LoadBalancer;
 import org.apache.hadoop.hbase.master.MasterFileSystem;
@@ -751,7 +748,7 @@ public class TestAsyncTableAdminApi extends 
TestAsyncAdminBase {
     // Verify descriptor from HDFS
     MasterFileSystem mfs = 
TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterFileSystem();
     Path tableDir = FSUtils.getTableDir(mfs.getRootDir(), tableName);
-    HTableDescriptor td =
+    TableDescriptor td =
         FSTableDescriptors.getTableDescriptorFromFs(mfs.getFileSystem(), 
tableDir);
     verifyTableDescriptor(td, tableName, families);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
index 27c9a5f..9f4ce35 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
@@ -23,15 +23,14 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ChoreService;
 import org.apache.hadoop.hbase.CoordinatedStateManager;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.ProcedureInfo;
 import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableDescriptors;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.MasterSwitchType;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.executor.ExecutorService;
@@ -75,7 +74,7 @@ public class MockNoopMasterServices implements 
MasterServices, Server {
 
   @Override
   public long createTable(
-      final HTableDescriptor desc,
+      final TableDescriptor desc,
       final byte[][] splitKeys,
       final long nonceGroup,
       final long nonce) throws IOException {
@@ -84,7 +83,7 @@ public class MockNoopMasterServices implements 
MasterServices, Server {
   }
 
   @Override
-  public long createSystemTable(final HTableDescriptor hTableDescriptor) 
throws IOException {
+  public long createSystemTable(final TableDescriptor tableDescriptor) throws 
IOException {
     return -1;
   }
 
@@ -267,7 +266,7 @@ public class MockNoopMasterServices implements 
MasterServices, Server {
   @Override
   public long modifyTable(
       final TableName tableName,
-      final HTableDescriptor descriptor,
+      final TableDescriptor descriptor,
       final long nonceGroup,
       final long nonce) throws IOException {
     return -1;
@@ -290,13 +289,13 @@ public class MockNoopMasterServices implements 
MasterServices, Server {
   }
 
   @Override
-  public long addColumn(final TableName tableName, final HColumnDescriptor 
columnDescriptor,
+  public long addColumn(final TableName tableName, final 
ColumnFamilyDescriptor columnDescriptor,
       final long nonceGroup, final long nonce) throws IOException {
     return -1;
   }
 
   @Override
-  public long modifyColumn(final TableName tableName, final HColumnDescriptor 
descriptor,
+  public long modifyColumn(final TableName tableName, final 
ColumnFamilyDescriptor descriptor,
       final long nonceGroup, final long nonce) throws IOException {
     return -1;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
index 48386a6..9101d5e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
@@ -24,19 +24,19 @@ import java.util.NavigableMap;
 import java.util.SortedSet;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CoordinatedStateManager;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableDescriptors;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.YouAreDeadException;
 import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.HConnectionTestingUtility;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.master.LoadBalancer;
 import org.apache.hadoop.hbase.master.MasterFileSystem;
 import org.apache.hadoop.hbase.master.MasterServices;
@@ -300,36 +300,36 @@ public class MockMasterServices extends 
MockNoopMasterServices {
   public TableDescriptors getTableDescriptors() {
     return new TableDescriptors() {
       @Override
-      public HTableDescriptor remove(TableName tablename) throws IOException {
+      public TableDescriptor remove(TableName tablename) throws IOException {
         // noop
         return null;
       }
 
       @Override
-      public Map<String, HTableDescriptor> getAll() throws IOException {
+      public Map<String, TableDescriptor> getAll() throws IOException {
         // noop
         return null;
       }
 
-      @Override public Map<String, HTableDescriptor> getAllDescriptors() 
throws IOException {
+      @Override public Map<String, TableDescriptor> getAllDescriptors() throws 
IOException {
         // noop
         return null;
       }
 
       @Override
-      public HTableDescriptor get(TableName tablename) throws IOException {
-        HTableDescriptor htd = new HTableDescriptor(tablename);
-        htd.addFamily(new HColumnDescriptor(DEFAULT_COLUMN_FAMILY_NAME));
-        return htd;
+      public TableDescriptor get(TableName tablename) throws IOException {
+        TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tablename);
+        
builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(DEFAULT_COLUMN_FAMILY_NAME));
+        return builder.build();
       }
 
       @Override
-      public Map<String, HTableDescriptor> getByNamespace(String name) throws 
IOException {
+      public Map<String, TableDescriptor> getByNamespace(String name) throws 
IOException {
         return null;
       }
 
       @Override
-      public void add(HTableDescriptor htd) throws IOException {
+      public void add(TableDescriptor htd) throws IOException {
         // noop
       }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
index 6dfcad1..226f9f1 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
@@ -33,20 +33,23 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.BufferedMutator;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.client.TableState;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.MasterMetaBootstrap;
@@ -61,6 +64,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.MD5Hash;
 import org.apache.hadoop.hbase.util.ModifyRegionUtils;
 
+@InterfaceAudience.Private
 public class MasterProcedureTestingUtility {
   private static final Log LOG = 
LogFactory.getLog(MasterProcedureTestingUtility.class);
 
@@ -136,17 +140,17 @@ public class MasterProcedureTestingUtility {
   // ==========================================================================
   //  Table Helpers
   // ==========================================================================
-  public static HTableDescriptor createHTD(final TableName tableName, final 
String... family) {
-    HTableDescriptor htd = new HTableDescriptor(tableName);
+  public static TableDescriptor createHTD(final TableName tableName, final 
String... family) {
+    TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tableName);
     for (int i = 0; i < family.length; ++i) {
-      htd.addFamily(new HColumnDescriptor(family[i]));
+      builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family[i]));
     }
-    return htd;
+    return builder.build();
   }
 
   public static HRegionInfo[] createTable(final 
ProcedureExecutor<MasterProcedureEnv> procExec,
       final TableName tableName, final byte[][] splitKeys, String... family) 
throws IOException {
-    HTableDescriptor htd = createHTD(tableName, family);
+    TableDescriptor htd = createHTD(tableName, family);
     HRegionInfo[] regions = ModifyRegionUtils.createHRegionInfos(htd, 
splitKeys);
     long procId = ProcedureTestingUtility.submitAndWait(procExec,
       new CreateTableProcedure(procExec.getEnvironment(), htd, regions));
@@ -194,12 +198,12 @@ public class MasterProcedureTestingUtility {
     assertEquals(regions.length, countMetaRegions(master, tableName));
 
     // check htd
-    HTableDescriptor htd = master.getTableDescriptors().get(tableName);
+    TableDescriptor htd = master.getTableDescriptors().get(tableName);
     assertTrue("table descriptor not found", htd != null);
     for (int i = 0; i < family.length; ++i) {
-      assertTrue("family not found " + family[i], 
htd.getFamily(Bytes.toBytes(family[i])) != null);
+      assertTrue("family not found " + family[i], 
htd.getColumnFamily(Bytes.toBytes(family[i])) != null);
     }
-    assertEquals(family.length, htd.getFamilies().size());
+    assertEquals(family.length, htd.getColumnFamilyCount());
   }
 
   public static void validateTableDeletion(
@@ -267,18 +271,18 @@ public class MasterProcedureTestingUtility {
 
   public static void validateColumnFamilyAddition(final HMaster master, final 
TableName tableName,
       final String family) throws IOException {
-    HTableDescriptor htd = master.getTableDescriptors().get(tableName);
+    TableDescriptor htd = master.getTableDescriptors().get(tableName);
     assertTrue(htd != null);
 
-    assertTrue(htd.hasFamily(family.getBytes()));
+    assertTrue(htd.hasColumnFamily(family.getBytes()));
   }
 
   public static void validateColumnFamilyDeletion(final HMaster master, final 
TableName tableName,
       final String family) throws IOException {
     // verify htd
-    HTableDescriptor htd = master.getTableDescriptors().get(tableName);
+    TableDescriptor htd = master.getTableDescriptors().get(tableName);
     assertTrue(htd != null);
-    assertFalse(htd.hasFamily(family.getBytes()));
+    assertFalse(htd.hasColumnFamily(family.getBytes()));
 
     // verify fs
     final FileSystem fs = master.getMasterFileSystem().getFileSystem();
@@ -290,13 +294,13 @@ public class MasterProcedureTestingUtility {
   }
 
   public static void validateColumnFamilyModification(final HMaster master,
-      final TableName tableName, final String family, HColumnDescriptor 
columnDescriptor)
+      final TableName tableName, final String family, ColumnFamilyDescriptor 
columnDescriptor)
       throws IOException {
-    HTableDescriptor htd = master.getTableDescriptors().get(tableName);
+    TableDescriptor htd = master.getTableDescriptors().get(tableName);
     assertTrue(htd != null);
 
-    HColumnDescriptor hcfd = htd.getFamily(family.getBytes());
-    assertTrue(hcfd.equals(columnDescriptor));
+    ColumnFamilyDescriptor hcfd = htd.getColumnFamily(family.getBytes());
+    assertEquals(0, ColumnFamilyDescriptor.COMPARATOR.compare(hcfd, 
columnDescriptor));
   }
 
   public static void loadData(final Connection connection, final TableName 
tableName,

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
index 177d862..eda7fcd 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
@@ -22,9 +22,11 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
@@ -76,10 +78,11 @@ public class TestCreateTableProcedure extends 
TestTableDDLProcedureBase {
     final ProcedureExecutor<MasterProcedureEnv> procExec = 
getMasterProcedureExecutor();
     final TableName tableName = TableName.valueOf(name.getMethodName());
     // create table with 0 families will fail
-    final HTableDescriptor htd = 
MasterProcedureTestingUtility.createHTD(tableName);
+    final TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(MasterProcedureTestingUtility.createHTD(tableName));
 
     // disable sanity check
-    htd.setConfiguration("hbase.table.sanity.checks", 
Boolean.FALSE.toString());
+    builder.setConfiguration("hbase.table.sanity.checks", 
Boolean.FALSE.toString());
+    TableDescriptor htd = builder.build();
     final HRegionInfo[] regions = ModifyRegionUtils.createHRegionInfos(htd, 
null);
 
     long procId =
@@ -96,7 +99,7 @@ public class TestCreateTableProcedure extends 
TestTableDDLProcedureBase {
   public void testCreateExisting() throws Exception {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     final ProcedureExecutor<MasterProcedureEnv> procExec = 
getMasterProcedureExecutor();
-    final HTableDescriptor htd = 
MasterProcedureTestingUtility.createHTD(tableName, "f");
+    final TableDescriptor htd = 
MasterProcedureTestingUtility.createHTD(tableName, "f");
     final HRegionInfo[] regions = ModifyRegionUtils.createHRegionInfos(htd, 
null);
 
     // create the table
@@ -125,7 +128,7 @@ public class TestCreateTableProcedure extends 
TestTableDDLProcedureBase {
 
     // Start the Create procedure && kill the executor
     byte[][] splitKeys = null;
-    HTableDescriptor htd = MasterProcedureTestingUtility.createHTD(tableName, 
"f1", "f2");
+    TableDescriptor htd = MasterProcedureTestingUtility.createHTD(tableName, 
"f1", "f2");
     HRegionInfo[] regions = ModifyRegionUtils.createHRegionInfos(htd, 
splitKeys);
     long procId = procExec.submitProcedure(
       new CreateTableProcedure(procExec.getEnvironment(), htd, regions));
@@ -138,18 +141,21 @@ public class TestCreateTableProcedure extends 
TestTableDDLProcedureBase {
   @Test(timeout=90000)
   public void testRollbackAndDoubleExecution() throws Exception {
     final TableName tableName = TableName.valueOf(name.getMethodName());
-    
testRollbackAndDoubleExecution(MasterProcedureTestingUtility.createHTD(tableName,
 F1, F2));
+    
testRollbackAndDoubleExecution(TableDescriptorBuilder.newBuilder(MasterProcedureTestingUtility.createHTD(tableName,
 F1, F2)));
   }
 
   @Test(timeout=90000)
   public void testRollbackAndDoubleExecutionOnMobTable() throws Exception {
     final TableName tableName = TableName.valueOf(name.getMethodName());
-    HTableDescriptor htd = MasterProcedureTestingUtility.createHTD(tableName, 
F1, F2);
-    htd.getFamily(Bytes.toBytes(F1)).setMobEnabled(true);
-    testRollbackAndDoubleExecution(htd);
+    TableDescriptor htd = MasterProcedureTestingUtility.createHTD(tableName, 
F1, F2);
+    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(htd)
+            
.modifyColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(htd.getColumnFamily(Bytes.toBytes(F1)))
+                    .setMobEnabled(true)
+                    .build());
+    testRollbackAndDoubleExecution(builder);
   }
 
-  private void testRollbackAndDoubleExecution(HTableDescriptor htd) throws 
Exception {
+  private void testRollbackAndDoubleExecution(TableDescriptorBuilder builder) 
throws Exception {
     // create the table
     final ProcedureExecutor<MasterProcedureEnv> procExec = 
getMasterProcedureExecutor();
     ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
@@ -158,7 +164,8 @@ public class TestCreateTableProcedure extends 
TestTableDDLProcedureBase {
     final byte[][] splitKeys = new byte[][] {
       Bytes.toBytes("a"), Bytes.toBytes("b"), Bytes.toBytes("c")
     };
-    htd.setRegionReplication(3);
+    builder.setRegionReplication(3);
+    TableDescriptor htd = builder.build();
     HRegionInfo[] regions = ModifyRegionUtils.createHRegionInfos(htd, 
splitKeys);
     long procId = procExec.submitProcedure(
       new CreateTableProcedure(procExec.getEnvironment(), htd, regions));
@@ -181,9 +188,9 @@ public class TestCreateTableProcedure extends 
TestTableDDLProcedureBase {
       splitKeys[i] = Bytes.toBytes(String.format("%08d", i));
     }
 
-    final HTableDescriptor htd = MasterProcedureTestingUtility.createHTD(
+    final TableDescriptor htd = MasterProcedureTestingUtility.createHTD(
       TableName.valueOf("TestMRegions"), F1, F2);
-    UTIL.getHBaseAdmin().createTableAsync(htd, splitKeys)
+    UTIL.getAdmin().createTableAsync(htd, splitKeys)
       .get(10, java.util.concurrent.TimeUnit.HOURS);
     LOG.info("TABLE CREATED");
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
index d2df2bf..db5eafa 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
@@ -27,8 +27,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
@@ -112,7 +112,7 @@ public class TestMasterFailoverWithProcedures {
 
     // Start the Create procedure && kill the executor
     byte[][] splitKeys = null;
-    HTableDescriptor htd = MasterProcedureTestingUtility.createHTD(tableName, 
"f1", "f2");
+    TableDescriptor htd = MasterProcedureTestingUtility.createHTD(tableName, 
"f1", "f2");
     HRegionInfo[] regions = ModifyRegionUtils.createHRegionInfos(htd, 
splitKeys);
     long procId = procExec.submitProcedure(
         new CreateTableProcedure(procExec.getEnvironment(), htd, regions));

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java
index a75cbc1..68013fb 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java
@@ -31,8 +31,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
@@ -135,7 +135,7 @@ public class TestMasterProcedureWalLease {
     backupStore3.recoverLease();
 
     // Try to trigger a command on the master (WAL lease expired on the active 
one)
-    HTableDescriptor htd = 
MasterProcedureTestingUtility.createHTD(TableName.valueOf(name.getMethodName()),
 "f");
+    TableDescriptor htd = 
MasterProcedureTestingUtility.createHTD(TableName.valueOf(name.getMethodName()),
 "f");
     HRegionInfo[] regions = ModifyRegionUtils.createHRegionInfos(htd, null);
     LOG.debug("submit proc");
     try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDescriptorModificationFromClient.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDescriptorModificationFromClient.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDescriptorModificationFromClient.java
index 77e1fc9..9d60bd8 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDescriptorModificationFromClient.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDescriptorModificationFromClient.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.InvalidFamilyOperationException;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.master.MasterFileSystem;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -274,14 +275,14 @@ public class TestTableDescriptorModificationFromClient {
     // Verify descriptor from HDFS
     MasterFileSystem mfs = 
TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterFileSystem();
     Path tableDir = FSUtils.getTableDir(mfs.getRootDir(), tableName);
-    HTableDescriptor td =
+    TableDescriptor td =
         FSTableDescriptors.getTableDescriptorFromFs(mfs.getFileSystem(), 
tableDir);
     verifyTableDescriptor(td, tableName, families);
   }
 
-  private void verifyTableDescriptor(final HTableDescriptor htd,
+  private void verifyTableDescriptor(final TableDescriptor htd,
       final TableName tableName, final byte[]... families) {
-    Set<byte[]> htdFamilies = htd.getFamiliesKeys();
+    Set<byte[]> htdFamilies = htd.getColumnFamilyNames();
     assertEquals(tableName, htd.getTableName());
     assertEquals(families.length, htdFamilies.size());
     for (byte[] familyName: families) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
index f93ce98..2fe8085 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.MobCompactPartitionPolicy;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
@@ -514,7 +515,7 @@ public class TestPartitionedMobCompactor {
     CacheConfig cacheConfig = null;
 
     MyPartitionedMobCompactor(Configuration conf, FileSystem fs, TableName 
tableName,
-        HColumnDescriptor column, ExecutorService pool, final int 
delPartitionSize,
+        ColumnFamilyDescriptor column, ExecutorService pool, final int 
delPartitionSize,
         final CacheConfig cacheConf, final int PartitionsIncludeDelFiles)
         throws IOException {
       super(conf, fs, tableName, column, pool);

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
index 570d2d8..6b01256 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -79,10 +80,11 @@ public class TestGetClosestAtOrBefore  {
     FileSystem filesystem = FileSystem.get(conf);
     Path rootdir = UTIL.getDataTestDirOnTestFS();
     // Up flush size else we bind up when we use default catalog flush of 16k.
-    UTIL.getMetaTableDescriptor().setMemStoreFlushSize(64 * 1024 * 1024);
+    TableDescriptorBuilder metaBuilder = UTIL.getMetaTableDescriptorBuilder()
+            .setMemStoreFlushSize(64 * 1024 * 1024);
 
     Region mr = 
HBaseTestingUtility.createRegionAndWAL(HRegionInfo.FIRST_META_REGIONINFO,
-        rootdir, this.conf, UTIL.getMetaTableDescriptor());
+        rootdir, this.conf, metaBuilder.build());
     try {
     // Write rows for three tables 'A', 'B', and 'C'.
     for (char c = 'A'; c < 'D'; c++) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
index 11c985d..e40bb43 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
@@ -39,22 +39,22 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownRegionException;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.exceptions.MergeRegionException;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.MasterRpcServices;
@@ -207,7 +207,7 @@ public class TestRegionMergeTransactionOnCluster {
       List<Pair<HRegionInfo, ServerName>> tableRegions = MetaTableAccessor
           .getTableRegionsAndLocations(MASTER.getConnection(), tableName);
       HRegionInfo mergedRegionInfo = tableRegions.get(0).getFirst();
-      HTableDescriptor tableDescriptor = MASTER.getTableDescriptors().get(
+      TableDescriptor tableDescriptor = MASTER.getTableDescriptors().get(
           tableName);
       Result mergedRegionResult = MetaTableAccessor.getRegionResult(
         MASTER.getConnection(), mergedRegionInfo.getRegionName());
@@ -231,11 +231,11 @@ public class TestRegionMergeTransactionOnCluster {
       assertTrue(fs.exists(regionAdir));
       assertTrue(fs.exists(regionBdir));
 
-      HColumnDescriptor[] columnFamilies = tableDescriptor.getColumnFamilies();
+      ColumnFamilyDescriptor[] columnFamilies = 
tableDescriptor.getColumnFamilies();
       HRegionFileSystem hrfs = new HRegionFileSystem(
         TEST_UTIL.getConfiguration(), fs, tabledir, mergedRegionInfo);
       int count = 0;
-      for(HColumnDescriptor colFamily : columnFamilies) {
+      for(ColumnFamilyDescriptor colFamily : columnFamilies) {
         count += hrfs.getStoreFiles(colFamily.getName()).size();
       }
       ADMIN.compactRegion(mergedRegionInfo.getRegionName());
@@ -244,7 +244,7 @@ public class TestRegionMergeTransactionOnCluster {
       long timeout = System.currentTimeMillis() + waitTime;
       int newcount = 0;
       while (System.currentTimeMillis() < timeout) {
-        for(HColumnDescriptor colFamily : columnFamilies) {
+        for(ColumnFamilyDescriptor colFamily : columnFamilies) {
           newcount += hrfs.getStoreFiles(colFamily.getName()).size();
         }
         if(newcount > count) {
@@ -263,7 +263,7 @@ public class TestRegionMergeTransactionOnCluster {
       }
       while (System.currentTimeMillis() < timeout) {
         int newcount1 = 0;
-        for(HColumnDescriptor colFamily : columnFamilies) {
+        for(ColumnFamilyDescriptor colFamily : columnFamilies) {
           newcount1 += hrfs.getStoreFiles(colFamily.getName()).size();
         }
         if(newcount1 <= 1) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
index 89598ad..3b66a1d 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
@@ -26,13 +26,13 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
@@ -261,7 +261,7 @@ public class TestRegionServerNoMaster {
         hri.getEncodedNameAsBytes()));
 
     // Let's start the open handler
-    HTableDescriptor htd = getRS().tableDescriptors.get(hri.getTable());
+    TableDescriptor htd = getRS().tableDescriptors.get(hri.getTable());
 
     getRS().service.submit(new OpenRegionHandler(getRS(), getRS(), hri, htd, 
-1));
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
index 18290f5..126c4e4 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
@@ -36,20 +36,20 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.Waiter.Predicate;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
@@ -652,31 +652,30 @@ public class SecureTestUtil {
 
   public static Table createTable(HBaseTestingUtility testUtil, TableName 
tableName,
       byte[][] families) throws Exception {
-    HTableDescriptor htd = new HTableDescriptor(tableName);
+    TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tableName);
     for (byte[] family : families) {
-      HColumnDescriptor hcd = new HColumnDescriptor(family);
-      htd.addFamily(hcd);
+      builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
     }
-    createTable(testUtil, testUtil.getAdmin(), htd);
-    return testUtil.getConnection().getTable(htd.getTableName());
+    createTable(testUtil, testUtil.getAdmin(), builder.build());
+    return testUtil.getConnection().getTable(tableName);
   }
 
-  public static void createTable(HBaseTestingUtility testUtil, 
HTableDescriptor htd)
+  public static void createTable(HBaseTestingUtility testUtil, TableDescriptor 
htd)
       throws Exception {
     createTable(testUtil, testUtil.getAdmin(), htd);
   }
 
-  public static void createTable(HBaseTestingUtility testUtil, 
HTableDescriptor htd,
+  public static void createTable(HBaseTestingUtility testUtil, TableDescriptor 
htd,
       byte[][] splitKeys) throws Exception {
     createTable(testUtil, testUtil.getAdmin(), htd, splitKeys);
   }
 
-  public static void createTable(HBaseTestingUtility testUtil, Admin admin, 
HTableDescriptor htd)
+  public static void createTable(HBaseTestingUtility testUtil, Admin admin, 
TableDescriptor htd)
       throws Exception {
     createTable(testUtil, admin, htd, null);
   }
 
-  public static void createTable(HBaseTestingUtility testUtil, Admin admin, 
HTableDescriptor htd,
+  public static void createTable(HBaseTestingUtility testUtil, Admin admin, 
TableDescriptor htd,
       byte[][] splitKeys) throws Exception {
     // NOTE: We need a latch because admin is not sync,
     // so the postOp coprocessor method may be called after the admin 
operation returned.

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java
index 3e1abb9..1a33f13 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java
@@ -24,18 +24,20 @@ import java.io.IOException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Assert;
 
 public class MobSnapshotTestingUtils {
@@ -60,15 +62,17 @@ public class MobSnapshotTestingUtils {
   private static void createMobTable(final HBaseTestingUtility util,
       final TableName tableName, final byte[][] splitKeys, int 
regionReplication,
       final byte[]... families) throws IOException, InterruptedException {
-    HTableDescriptor htd = new HTableDescriptor(tableName);
-    htd.setRegionReplication(regionReplication);
+    TableDescriptorBuilder builder
+      = TableDescriptorBuilder.newBuilder(tableName)
+            .setRegionReplication(regionReplication);
     for (byte[] family : families) {
-      HColumnDescriptor hcd = new HColumnDescriptor(family);
-      hcd.setMobEnabled(true);
-      hcd.setMobThreshold(0L);
-      htd.addFamily(hcd);
+      builder.addColumnFamily(ColumnFamilyDescriptorBuilder
+          .newBuilder(family)
+          .setMobEnabled(true)
+          .setMobThreshold(0L)
+          .build());
     }
-    util.getAdmin().createTable(htd, splitKeys);
+    util.getAdmin().createTable(builder.build(), splitKeys);
     SnapshotTestingUtils.waitForTableToBeOnline(util, tableName);
     assertEquals((splitKeys.length + 1) * regionReplication, util
         .getAdmin().getTableRegions(tableName).size());
@@ -80,29 +84,29 @@ public class MobSnapshotTestingUtils {
    * @param util
    * @param tableName
    * @param families
-   * @return An HTable instance for the created table.
+   * @return A Table instance for the created table.
    * @throws IOException
    */
   public static Table createMobTable(final HBaseTestingUtility util,
       final TableName tableName, final byte[]... families) throws IOException {
-    HTableDescriptor htd = new HTableDescriptor(tableName);
+    TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tableName);
     for (byte[] family : families) {
-      HColumnDescriptor hcd = new HColumnDescriptor(family);
       // Disable blooms (they are on by default as of 0.95) but we disable them
       // here because
       // tests have hard coded counts of what to expect in block cache, etc.,
       // and blooms being
       // on is interfering.
-      hcd.setBloomFilterType(BloomType.NONE);
-      hcd.setMobEnabled(true);
-      hcd.setMobThreshold(0L);
-      htd.addFamily(hcd);
+      builder.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family)
+              .setBloomFilterType(BloomType.NONE)
+              .setMobEnabled(true)
+              .setMobThreshold(0L)
+              .build());
     }
-    util.getAdmin().createTable(htd);
+    util.getAdmin().createTable(builder.build());
     // HBaseAdmin only waits for regions to appear in hbase:meta we should wait
     // until they are assigned
-    util.waitUntilAllRegionsAssigned(htd.getTableName());
-    return 
ConnectionFactory.createConnection(util.getConfiguration()).getTable(htd.getTableName());
+    util.waitUntilAllRegionsAssigned(tableName);
+    return 
ConnectionFactory.createConnection(util.getConfiguration()).getTable(tableName);
   }
 
   /**
@@ -146,13 +150,14 @@ public class MobSnapshotTestingUtils {
     }
 
     @Override
-    public HTableDescriptor createHtd(final String tableName) {
-      HTableDescriptor htd = new 
HTableDescriptor(TableName.valueOf(tableName));
-      HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAMILY);
-      hcd.setMobEnabled(true);
-      hcd.setMobThreshold(0L);
-      htd.addFamily(hcd);
-      return htd;
+    public TableDescriptor createHtd(final String tableName) {
+      return TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
+              .addColumnFamily(ColumnFamilyDescriptorBuilder
+                  .newBuilder(Bytes.toBytes(TEST_FAMILY))
+                  .setMobEnabled(true)
+                  .setMobThreshold(0L)
+                  .build())
+              .build();
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
index dab55f6..71dac9c 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
@@ -40,36 +40,35 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.SnapshotType;
 import org.apache.hadoop.hbase.client.BufferedMutator;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionReplicaUtil;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
+import org.apache.hadoop.hbase.client.SnapshotType;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
-import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.MasterFileSystem;
 import org.apache.hadoop.hbase.mob.MobUtils;
+import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.client.SnapshotDescription;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
-import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.FSVisitor;
@@ -492,7 +491,7 @@ public final class SnapshotTestingUtils {
     public static class SnapshotBuilder {
       private final RegionData[] tableRegions;
       private final SnapshotProtos.SnapshotDescription desc;
-      private final HTableDescriptor htd;
+      private final TableDescriptor htd;
       private final Configuration conf;
       private final FileSystem fs;
       private final Path rootDir;
@@ -500,7 +499,7 @@ public final class SnapshotTestingUtils {
       private int snapshotted = 0;
 
       public SnapshotBuilder(final Configuration conf, final FileSystem fs,
-          final Path rootDir, final HTableDescriptor htd,
+          final Path rootDir, final TableDescriptor htd,
           final SnapshotProtos.SnapshotDescription desc, final RegionData[] 
tableRegions)
           throws IOException {
         this.fs = fs;
@@ -514,7 +513,7 @@ public final class SnapshotTestingUtils {
           .createTableDescriptorForTableDirectory(snapshotDir, htd, false);
       }
 
-      public HTableDescriptor getTableDescriptor() {
+      public TableDescriptor getTableDescriptor() {
         return this.htd;
       }
 
@@ -680,11 +679,11 @@ public final class SnapshotTestingUtils {
 
     private SnapshotBuilder createSnapshot(final String snapshotName, final 
String tableName,
         final int numRegions, final int version) throws IOException {
-      HTableDescriptor htd = createHtd(tableName);
+      TableDescriptor htd = createHtd(tableName);
       RegionData[] regions = createTable(htd, numRegions);
 
       SnapshotProtos.SnapshotDescription desc = 
SnapshotProtos.SnapshotDescription.newBuilder()
-        .setTable(htd.getNameAsString())
+        .setTable(htd.getTableName().getNameAsString())
         .setName(snapshotName)
         .setVersion(version)
         .build();
@@ -694,13 +693,13 @@ public final class SnapshotTestingUtils {
       return new SnapshotBuilder(conf, fs, rootDir, htd, desc, regions);
     }
 
-    public HTableDescriptor createHtd(final String tableName) {
-      HTableDescriptor htd = new 
HTableDescriptor(TableName.valueOf(tableName));
-      htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
-      return htd;
+    public TableDescriptor createHtd(final String tableName) {
+      return TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
+              .addColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY))
+              .build();
     }
 
-    private RegionData[] createTable(final HTableDescriptor htd, final int 
nregions)
+    private RegionData[] createTable(final TableDescriptor htd, final int 
nregions)
         throws IOException {
       Path tableDir = FSUtils.getTableDir(rootDir, htd.getTableName());
       new 
FSTableDescriptors(conf).createTableDescriptorForTableDirectory(tableDir, htd, 
false);
@@ -766,14 +765,15 @@ public final class SnapshotTestingUtils {
   public static void createTable(final HBaseTestingUtility util, final 
TableName tableName,
       int regionReplication, int nRegions, final byte[]... families)
       throws IOException, InterruptedException {
-    HTableDescriptor htd = new HTableDescriptor(tableName);
-    htd.setRegionReplication(regionReplication);
+    TableDescriptorBuilder builder
+      = TableDescriptorBuilder
+          .newBuilder(tableName)
+          .setRegionReplication(regionReplication);
     for (byte[] family : families) {
-      HColumnDescriptor hcd = new HColumnDescriptor(family);
-      htd.addFamily(hcd);
+      builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
     }
     byte[][] splitKeys = getSplitKeys(nRegions);
-    util.createTable(htd, splitKeys);
+    util.createTable(builder.build(), splitKeys);
     assertEquals((splitKeys.length + 1) * regionReplication,
         util.getAdmin().getTableRegions(tableName).size());
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
index 4b684e3..b7110b2 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -104,11 +104,11 @@ public class TestRestoreSnapshotHelper {
     builder.addRegionV2();
     builder.addRegionV1();
     Path snapshotDir = builder.commit();
-    HTableDescriptor htd = builder.getTableDescriptor();
+    TableDescriptor htd = builder.getTableDescriptor();
     SnapshotDescription desc = builder.getSnapshotDescription();
 
     // Test clone a snapshot
-    HTableDescriptor htdClone = snapshotMock.createHtd("testtb-clone");
+    TableDescriptor htdClone = snapshotMock.createHtd("testtb-clone");
     testRestore(snapshotDir, desc, htdClone);
     verifyRestore(rootDir, htd, htdClone);
 
@@ -118,13 +118,13 @@ public class TestRestoreSnapshotHelper {
         .setTable("testtb-clone")
         .build();
     Path cloneDir = FSUtils.getTableDir(rootDir, htdClone.getTableName());
-    HTableDescriptor htdClone2 = snapshotMock.createHtd("testtb-clone2");
+    TableDescriptor htdClone2 = snapshotMock.createHtd("testtb-clone2");
     testRestore(cloneDir, cloneDesc, htdClone2);
     verifyRestore(rootDir, htd, htdClone2);
   }
 
-  private void verifyRestore(final Path rootDir, final HTableDescriptor 
sourceHtd,
-      final HTableDescriptor htdClone) throws IOException {
+  private void verifyRestore(final Path rootDir, final TableDescriptor 
sourceHtd,
+      final TableDescriptor htdClone) throws IOException {
     List<String> files = SnapshotTestingUtils.listHFileNames(fs,
       FSUtils.getTableDir(rootDir, htdClone.getTableName()));
     assertEquals(12, files.size());
@@ -148,7 +148,7 @@ public class TestRestoreSnapshotHelper {
    * @param htdClone The HTableDescriptor of the table to restore/clone.
    */
   private void testRestore(final Path snapshotDir, final SnapshotDescription 
sd,
-      final HTableDescriptor htdClone) throws IOException {
+      final TableDescriptor htdClone) throws IOException {
     LOG.debug("pre-restore table=" + htdClone.getTableName() + " snapshot=" + 
snapshotDir);
     FSUtils.logFileSystemState(fs, rootDir, LOG);
 
@@ -164,7 +164,7 @@ public class TestRestoreSnapshotHelper {
    * Initialize the restore helper, based on the snapshot and table 
information provided.
    */
   private RestoreSnapshotHelper getRestoreHelper(final Path rootDir, final 
Path snapshotDir,
-      final SnapshotDescription sd, final HTableDescriptor htdClone) throws 
IOException {
+      final SnapshotDescription sd, final TableDescriptor htdClone) throws 
IOException {
     ForeignExceptionDispatcher monitor = 
Mockito.mock(ForeignExceptionDispatcher.class);
     MonitoredTask status = Mockito.mock(MonitoredTask.class);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
index 0ee28d1..8ba4262 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
@@ -28,9 +28,9 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest;
@@ -129,7 +129,7 @@ public class TestSnapshotManifest {
       SnapshotRegionManifest.Builder dataRegionManifestBuilder =
           SnapshotRegionManifest.newBuilder();
 
-      for (HColumnDescriptor hcd: builder.getTableDescriptor().getFamilies()) {
+      for (ColumnFamilyDescriptor hcd: 
builder.getTableDescriptor().getColumnFamilies()) {
         SnapshotRegionManifest.FamilyFiles.Builder family =
             SnapshotRegionManifest.FamilyFiles.newBuilder();
         family.setFamilyName(UnsafeByteOperations.unsafeWrap(hcd.getName()));
@@ -150,7 +150,7 @@ public class TestSnapshotManifest {
     }
 
     dataManifestBuilder
-        
.setTableSchema(ProtobufUtil.convertToTableSchema(builder.getTableDescriptor()));
+        
.setTableSchema(ProtobufUtil.toTableSchema(builder.getTableDescriptor()));
 
     SnapshotDataManifest dataManifest = dataManifestBuilder.build();
     return writeDataManifest(dataManifest);
@@ -163,7 +163,7 @@ public class TestSnapshotManifest {
     SnapshotRegionManifest.Builder dataRegionManifestBuilder = 
SnapshotRegionManifest.newBuilder();
     dataRegionManifestBuilder.setRegionInfo(HRegionInfo.convert(regionInfo));
 
-    for (HColumnDescriptor hcd: builder.getTableDescriptor().getFamilies()) {
+    for (ColumnFamilyDescriptor hcd: 
builder.getTableDescriptor().getColumnFamilies()) {
       SnapshotRegionManifest.FamilyFiles.Builder family =
           SnapshotRegionManifest.FamilyFiles.newBuilder();
       family.setFamilyName(UnsafeByteOperations.unsafeWrap(hcd.getName()));

http://git-wip-us.apache.org/repos/asf/hbase/blob/205016ca/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
index 8337eb0..30a7cd6 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
@@ -40,11 +40,12 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableDescriptors;
 import org.apache.hadoop.hbase.TableExistsException;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -78,7 +79,7 @@ public class TestFSTableDescriptors {
   @Test
   public void testCreateAndUpdate() throws IOException {
     Path testdir = UTIL.getDataTestDir(name.getMethodName());
-    HTableDescriptor htd = new 
HTableDescriptor(TableName.valueOf(name.getMethodName()));
+    TableDescriptor htd = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build();
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), 
fs, testdir);
     assertTrue(fstd.createTableDescriptor(htd));
@@ -98,7 +99,7 @@ public class TestFSTableDescriptors {
   @Test
   public void testSequenceIdAdvancesOnTableInfo() throws IOException {
     Path testdir = UTIL.getDataTestDir(name.getMethodName());
-    HTableDescriptor htd = new 
HTableDescriptor(TableName.valueOf(name.getMethodName()));
+    TableDescriptor htd = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build();
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), 
fs, testdir);
     Path p0 = fstd.updateTableDescriptor(htd);
@@ -118,7 +119,7 @@ public class TestFSTableDescriptors {
     assertTrue(!fs.exists(p2));
     int i3 = FSTableDescriptors.getTableInfoSequenceId(p3);
     assertTrue(i3 == i2 + 1);
-    HTableDescriptor descriptor = fstd.get(htd.getTableName());
+    TableDescriptor descriptor = fstd.get(htd.getTableName());
     assertEquals(descriptor, htd);
   }
 
@@ -161,7 +162,7 @@ public class TestFSTableDescriptors {
     // Cleanup old tests if any detrius laying around.
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
     TableDescriptors htds = new FSTableDescriptors(UTIL.getConfiguration(), 
fs, rootdir);
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
+    TableDescriptor htd = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build();
     htds.add(htd);
     assertNotNull(htds.remove(htd.getTableName()));
     assertNull(htds.remove(htd.getTableName()));
@@ -170,11 +171,11 @@ public class TestFSTableDescriptors {
   @Test public void testReadingHTDFromFS() throws IOException {
     final String name = this.name.getMethodName();
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
+    TableDescriptor htd = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build();
     Path rootdir = UTIL.getDataTestDir(name);
     FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), 
fs, rootdir);
     fstd.createTableDescriptor(htd);
-    HTableDescriptor td2 =
+    TableDescriptor td2 =
       FSTableDescriptors.getTableDescriptorFromFs(fs, rootdir, 
htd.getTableName());
     assertTrue(htd.equals(td2));
   }
@@ -184,25 +185,25 @@ public class TestFSTableDescriptors {
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     Path rootdir = UTIL.getDataTestDir(name);
     FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), 
fs, rootdir);
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
+    TableDescriptor htd = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build();
     Path descriptorFile = fstd.updateTableDescriptor(htd);
     try (FSDataOutputStream out = fs.create(descriptorFile, true)) {
-      out.write(htd.toByteArray());
+      out.write(TableDescriptorBuilder.toByteArray(htd));
     }
     FSTableDescriptors fstd2 = new FSTableDescriptors(UTIL.getConfiguration(), 
fs, rootdir);
-    HTableDescriptor td2 = fstd2.get(htd.getTableName());
+    TableDescriptor td2 = fstd2.get(htd.getTableName());
     assertEquals(htd, td2);
     FileStatus descriptorFile2 =
         FSTableDescriptors.getTableInfoPath(fs, 
fstd2.getTableDir(htd.getTableName()));
-    byte[] buffer = htd.toByteArray();
+    byte[] buffer = TableDescriptorBuilder.toByteArray(htd);
     try (FSDataInputStream in = fs.open(descriptorFile2.getPath())) {
       in.readFully(buffer);
     }
-    HTableDescriptor td3 = HTableDescriptor.parseFrom(buffer);
+    TableDescriptor td3 = TableDescriptorBuilder.parseFrom(buffer);
     assertEquals(htd, td3);
   }
 
-  @Test public void testHTableDescriptors()
+  @Test public void testTableDescriptors()
   throws IOException, InterruptedException {
     final String name = this.name.getMethodName();
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
@@ -210,7 +211,7 @@ public class TestFSTableDescriptors {
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
     FSTableDescriptors htds = new FSTableDescriptors(UTIL.getConfiguration(), 
fs, rootdir) {
       @Override
-      public HTableDescriptor get(TableName tablename)
+      public TableDescriptor get(TableName tablename)
           throws TableExistsException, FileNotFoundException, IOException {
         LOG.info(tablename + ", cachehits=" + this.cachehits);
         return super.get(tablename);
@@ -219,9 +220,7 @@ public class TestFSTableDescriptors {
     final int count = 10;
     // Write out table infos.
     for (int i = 0; i < count; i++) {
-      HTableDescriptor htd = new HTableDescriptor(
-          new HTableDescriptor(TableName.valueOf(name + i)));
-      htds.createTableDescriptor(htd);
+      
htds.createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.valueOf(name
 + i)).build());
     }
 
     for (int i = 0; i < count; i++) {
@@ -232,9 +231,9 @@ public class TestFSTableDescriptors {
     }
     // Update the table infos
     for (int i = 0; i < count; i++) {
-      HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name + i));
-      htd.addFamily(new HColumnDescriptor("" + i));
-      htds.updateTableDescriptor(htd);
+      TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name + i));
+      builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of("" + i));
+      htds.updateTableDescriptor(builder.build());
     }
     // Wait a while so mod time we write is for sure different.
     Thread.sleep(100);
@@ -250,7 +249,7 @@ public class TestFSTableDescriptors {
   }
 
   @Test
-  public void testHTableDescriptorsNoCache()
+  public void testTableDescriptorsNoCache()
     throws IOException, InterruptedException {
     final String name = this.name.getMethodName();
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
@@ -261,8 +260,7 @@ public class TestFSTableDescriptors {
     final int count = 10;
     // Write out table infos.
     for (int i = 0; i < count; i++) {
-      HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name + i));
-      htds.createTableDescriptor(htd);
+      
htds.createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.valueOf(name
 + i)).build());
     }
 
     for (int i = 0; i < 2 * count; i++) {
@@ -270,14 +268,14 @@ public class TestFSTableDescriptors {
     }
     // Update the table infos
     for (int i = 0; i < count; i++) {
-      HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name + i));
-      htd.addFamily(new HColumnDescriptor("" + i));
-      htds.updateTableDescriptor(htd);
+      TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name + i));
+      builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of("" + i));
+      htds.updateTableDescriptor(builder.build());
     }
     for (int i = 0; i < count; i++) {
       assertNotNull("Expected HTD, got null instead", 
htds.get(TableName.valueOf(name + i)));
       assertTrue("Column Family " + i + " missing",
-                 htds.get(TableName.valueOf(name + 
i)).hasFamily(Bytes.toBytes("" + i)));
+                 htds.get(TableName.valueOf(name + 
i)).hasColumnFamily(Bytes.toBytes("" + i)));
     }
     assertEquals(count * 4, htds.invocations);
     assertEquals("expected=0, actual=" + htds.cachehits, 0, htds.cachehits);
@@ -294,12 +292,10 @@ public class TestFSTableDescriptors {
     final int count = 4;
     // Write out table infos.
     for (int i = 0; i < count; i++) {
-      HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name + i));
-      htds.createTableDescriptor(htd);
+      
htds.createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.valueOf(name
 + i)).build());
     }
     // add hbase:meta
-    HTableDescriptor htd = new HTableDescriptor(TableName.META_TABLE_NAME);
-    htds.createTableDescriptor(htd);
+    
htds.createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.META_TABLE_NAME).build());
 
     assertEquals("getAll() didn't return all TableDescriptors, expected: " +
                    (count + 1) + " got: " + htds.getAll().size(),
@@ -321,8 +317,7 @@ public class TestFSTableDescriptors {
     final int count = 10;
     // Write out table infos via non-cached FSTableDescriptors
     for (int i = 0; i < count; i++) {
-      HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name + i));
-      nonchtds.createTableDescriptor(htd);
+      
nonchtds.createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.valueOf(name
 + i)).build());
     }
 
     // Calls to getAll() won't increase the cache counter, do per table.
@@ -333,15 +328,15 @@ public class TestFSTableDescriptors {
     assertTrue(nonchtds.getAll().size() == chtds.getAll().size());
 
     // add a new entry for hbase:meta
-    HTableDescriptor htd = new HTableDescriptor(TableName.META_TABLE_NAME);
+    TableDescriptor htd = 
TableDescriptorBuilder.newBuilder(TableName.META_TABLE_NAME).build();
     nonchtds.createTableDescriptor(htd);
 
     // hbase:meta will only increase the cachehit by 1
     assertTrue(nonchtds.getAll().size() == chtds.getAll().size());
 
-    for (Map.Entry entry: nonchtds.getAll().entrySet()) {
+    for (Map.Entry<String, TableDescriptor> entry: 
nonchtds.getAll().entrySet()) {
       String t = (String) entry.getKey();
-      HTableDescriptor nchtd = (HTableDescriptor) entry.getValue();
+      TableDescriptor nchtd = entry.getValue();
       assertTrue("expected " + htd.toString() +
                    " got: " + chtds.get(TableName.valueOf(t)).toString(),
                  (nchtd.equals(chtds.get(TableName.valueOf(t)))));
@@ -366,7 +361,7 @@ public class TestFSTableDescriptors {
     // Cleanup old tests if any detrius laying around.
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
     TableDescriptors htds = new FSTableDescriptors(UTIL.getConfiguration(), 
fs, rootdir);
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
+    TableDescriptor htd = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build();
     htds.add(htd);
     htds.add(htd);
     htds.add(htd);
@@ -415,12 +410,14 @@ public class TestFSTableDescriptors {
   @Test
   public void testCreateTableDescriptorUpdatesIfExistsAlready() throws 
IOException {
     Path testdir = UTIL.getDataTestDir(name.getMethodName());
-    HTableDescriptor htd = new 
HTableDescriptor(TableName.valueOf(name.getMethodName()));
+    TableDescriptor htd = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build();
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), 
fs, testdir);
     assertTrue(fstd.createTableDescriptor(htd));
     assertFalse(fstd.createTableDescriptor(htd));
-    htd.setValue(Bytes.toBytes("mykey"), Bytes.toBytes("myValue"));
+    htd = TableDescriptorBuilder.newBuilder(htd)
+            .setValue(Bytes.toBytes("mykey"), Bytes.toBytes("myValue"))
+            .build();
     assertTrue(fstd.createTableDescriptor(htd)); //this will re-create
     Path tableDir = fstd.getTableDir(htd.getTableName());
     Path tmpTableDir = new Path(tableDir, FSTableDescriptors.TMP_DIR);
@@ -443,10 +440,10 @@ public class TestFSTableDescriptors {
     }
 
     @Override
-    public HTableDescriptor get(TableName tablename)
+    public TableDescriptor get(TableName tablename)
       throws TableExistsException, FileNotFoundException, IOException {
       LOG.info((super.isUsecache() ? "Cached" : "Non-Cached") +
-                 " HTableDescriptor.get() on " + tablename + ", cachehits=" + 
this.cachehits);
+                 " TableDescriptor.get() on " + tablename + ", cachehits=" + 
this.cachehits);
       return super.get(tablename);
     }
   }

Reply via email to