HBASE-18241 Change client.Table, client.Admin, Region, Store, and HBaseTestingUtility to not use HTableDescriptor or HColumnDescriptor


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/53ec9c5b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/53ec9c5b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/53ec9c5b

Branch: refs/heads/branch-2
Commit: 53ec9c5bd35ee4814d1ec742b0ca5d649d5fc30e
Parents: c05a408
Author: Chia-Ping Tsai <chia7...@gmail.com>
Authored: Sat Jul 8 17:00:15 2017 +0800
Committer: Chia-Ping Tsai <chia7...@gmail.com>
Committed: Sat Jul 8 17:00:15 2017 +0800

----------------------------------------------------------------------
 .../apache/hadoop/hbase/HTableDescriptor.java   |  16 +-
 .../org/apache/hadoop/hbase/client/Admin.java   | 280 ++++++++++++++++-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  | 144 ++++++++-
 .../org/apache/hadoop/hbase/client/HTable.java  |  12 +-
 .../org/apache/hadoop/hbase/client/Table.java   |  11 +-
 .../hadoop/hbase/client/TableDescriptor.java    |   8 +
 .../hbase/client/TableDescriptorBuilder.java    |  11 +-
 .../hadoop/hbase/security/EncryptionUtil.java   |   3 +-
 .../hbase/shaded/protobuf/RequestConverter.java |   6 +-
 .../example/ZooKeeperScanPolicyObserver.java    |   2 +-
 .../hadoop/hbase/rest/client/RemoteHTable.java  |   6 +
 .../backup/mapreduce/HFileSplitterJob.java      |   2 +-
 .../hadoop/hbase/client/HTableWrapper.java      |   5 +
 .../hbase/constraint/ConstraintProcessor.java   |   3 +-
 .../hadoop/hbase/constraint/Constraints.java    |   3 +-
 .../hadoop/hbase/mapreduce/CopyTable.java       |   2 +-
 .../hbase/mapreduce/HFileOutputFormat2.java     |  68 +++--
 .../apache/hadoop/hbase/mapreduce/Import.java   |   8 +-
 .../hadoop/hbase/mapreduce/ImportTsv.java       |  10 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.java  |   7 +-
 .../mapreduce/MultiTableHFileOutputFormat.java  |   5 +-
 .../hadoop/hbase/mapreduce/WALPlayer.java       |   2 +-
 .../hbase/mob/DefaultMobStoreCompactor.java     |   6 +-
 .../hbase/mob/DefaultMobStoreFlusher.java       |   6 +-
 .../apache/hadoop/hbase/mob/MobCacheConfig.java |   3 +-
 .../org/apache/hadoop/hbase/mob/MobUtils.java   |   3 +-
 .../hbase/quotas/ActivePolicyEnforcement.java   |   2 +-
 .../quotas/RegionServerRpcQuotaManager.java     |   2 +-
 .../hadoop/hbase/regionserver/CompactSplit.java |   6 +-
 .../hbase/regionserver/CompactingMemStore.java  |   2 +-
 .../ConstantSizeRegionSplitPolicy.java          |   3 +-
 .../hbase/regionserver/DefaultStoreFlusher.java |   2 +-
 .../DelimitedKeyPrefixRegionSplitPolicy.java    |   4 +-
 .../regionserver/FlushAllLargeStoresPolicy.java |   4 +-
 .../regionserver/FlushLargeStoresPolicy.java    |   8 +-
 .../hbase/regionserver/FlushPolicyFactory.java  |   5 +-
 .../hadoop/hbase/regionserver/HMobStore.java    |   9 +-
 .../hadoop/hbase/regionserver/HRegion.java      | 136 +++++----
 .../hbase/regionserver/HRegionServer.java       |   6 +-
 .../hadoop/hbase/regionserver/HStore.java       |  21 +-
 ...IncreasingToUpperBoundRegionSplitPolicy.java |   9 +-
 .../KeyPrefixRegionSplitPolicy.java             |  10 +-
 .../hbase/regionserver/MemStoreFlusher.java     |   2 +-
 .../regionserver/MetricsRegionWrapperImpl.java  |   6 +-
 .../MetricsTableWrapperAggregateImpl.java       |   2 +-
 .../hbase/regionserver/RSDumpServlet.java       |   2 +-
 .../hbase/regionserver/RSRpcServices.java       |   6 +-
 .../hadoop/hbase/regionserver/Region.java       |   3 +-
 .../regionserver/RegionCoprocessorHost.java     |  11 +-
 .../regionserver/RegionServicesForStores.java   |   2 +-
 .../hbase/regionserver/RegionSplitPolicy.java   |   7 +-
 .../hadoop/hbase/regionserver/ScanInfo.java     |   6 +-
 .../regionserver/SecureBulkLoadManager.java     |   2 +-
 .../apache/hadoop/hbase/regionserver/Store.java |   4 +-
 .../hbase/regionserver/StoreFileScanner.java    |   2 +-
 .../hbase/regionserver/StripeStoreFlusher.java  |   2 +-
 .../regionserver/compactions/Compactor.java     |  12 +-
 .../throttle/ThroughputControlUtil.java         |   3 +-
 .../security/access/AccessControlLists.java     |   2 +-
 .../hbase/security/access/AccessController.java |  15 +-
 .../security/visibility/VisibilityUtils.java    |   3 +-
 .../hadoop/hbase/snapshot/SnapshotManifest.java |   2 +-
 .../org/apache/hadoop/hbase/tool/Canary.java    |  30 +-
 .../hadoop/hbase/util/RegionSplitter.java       |  18 +-
 .../hbase/util/ServerRegionReplicaUtil.java     |   2 +-
 .../hadoop/hbase/HBaseTestingUtility.java       | 298 ++++++++++++++++---
 .../apache/hadoop/hbase/MiniHBaseCluster.java   |   8 +-
 .../org/apache/hadoop/hbase/TestIOFencing.java  |  12 +-
 .../hadoop/hbase/backup/TestHFileArchiving.java |   4 +-
 .../hbase/backup/TestIncrementalBackup.java     |   2 +-
 .../apache/hadoop/hbase/client/TestAdmin1.java  |   4 +-
 ...estAvoidCellReferencesIntoShippedBlocks.java |   2 +-
 .../hadoop/hbase/client/TestMetaCache.java      |   2 +-
 .../hbase/client/TestReplicaWithCluster.java    |  33 +-
 .../hbase/coprocessor/SimpleRegionObserver.java |  10 +-
 .../TestCoprocessorConfiguration.java           |   6 +-
 .../TestRegionObserverScannerOpenHook.java      |   4 +-
 .../hbase/master/TestRegionPlacement.java       |   8 +-
 .../master/cleaner/TestSnapshotFromMaster.java  |  17 +-
 .../regionserver/NoOpScanPolicyObserver.java    |   4 +-
 .../hbase/regionserver/RegionAsTable.java       |  10 +-
 .../hbase/regionserver/TestAtomicOperation.java |   3 +-
 .../regionserver/TestCompoundBloomFilter.java   |   2 +-
 .../hbase/regionserver/TestHMobStore.java       |  14 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |   9 +-
 .../regionserver/TestHRegionReplayEvents.java   |  10 +-
 .../hbase/regionserver/TestHStoreFile.java      |   8 +-
 .../regionserver/TestMobStoreCompaction.java    |  20 +-
 .../regionserver/TestRegionServerAbort.java     |   3 +-
 .../regionserver/TestRegionServerMetrics.java   |  22 +-
 .../regionserver/TestRegionSplitPolicy.java     |   6 +-
 .../TestScannerHeartbeatMessages.java           |   3 +-
 .../hadoop/hbase/regionserver/TestStore.java    |   2 +-
 .../compactions/TestDateTieredCompactor.java    |   2 +-
 .../compactions/TestStripeCompactionPolicy.java |   2 +-
 .../compactions/TestStripeCompactor.java        |   2 +-
 .../replication/TestMasterReplication.java      |   2 +-
 .../replication/TestMultiSlaveReplication.java  |   2 +-
 .../access/TestWithDisabledAuthorization.java   |   4 +-
 .../hbase/util/HFileArchiveTestingUtil.java     |   4 +-
 .../hbase/util/TestCoprocessorScanPolicy.java   |   7 +-
 101 files changed, 1142 insertions(+), 414 deletions(-)
----------------------------------------------------------------------


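The client-visible shape of the change, in a minimal sketch: Admin now hands out immutable TableDescriptor instances, and every HTableDescriptor-based method becomes a deprecated shim. The configuration and printing below are illustrative, not part of this patch.

  import java.io.IOException;
  import java.util.List;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;
  import org.apache.hadoop.hbase.client.TableDescriptor;

  public class ListTableDescriptorsSketch {
    public static void main(String[] args) throws IOException {
      Configuration conf = HBaseConfiguration.create();
      try (Connection conn = ConnectionFactory.createConnection(conf);
           Admin admin = conn.getAdmin()) {
        // Replaces the deprecated HTableDescriptor[] listTables().
        List<TableDescriptor> tables = admin.listTableDescriptors();
        for (TableDescriptor td : tables) {
          System.out.println(td.getTableName());
        }
      }
    }
  }
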
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index 5eb737b..c09d434 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -150,17 +150,6 @@ public class HTableDescriptor implements TableDescriptor, Comparable<HTableDescr
   }
 
   /**
-   * Getter for accessing the metadata associated with the key
-   *
-   * @param key The key.
-   * @return The value.
-   */
-  public String getValue(String key) {
-    byte[] value = getValue(Bytes.toBytes(key));
-    return value == null ? null : Bytes.toString(value);
-  }
-
-  /**
    * @return Getter for fetching an unmodifiable map.
    */
   @Override
@@ -861,6 +850,11 @@ public class HTableDescriptor implements TableDescriptor, Comparable<HTableDescr
   }
 
   @Override
+  public String getValue(String key) {
+    return delegatee.getValue(key);
+  }
+
+  @Override
   public byte[] getValue(byte[] key) {
     return delegatee.getValue(key);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 9300372..e428012 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -98,30 +98,67 @@ public interface Admin extends Abortable, Closeable {
    *
    * @return - returns an array of read-only HTableDescriptors
    * @throws IOException if a remote or network exception occurs
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #listTableDescriptors()}
    */
+  @Deprecated
   HTableDescriptor[] listTables() throws IOException;
 
   /**
+   * List all the userspace tables.
+   *
+   * @return - returns a list of TableDescriptors
+   * @throws IOException if a remote or network exception occurs
+   */
+  List<TableDescriptor> listTableDescriptors() throws IOException;
+
+  /**
    * List all the userspace tables matching the given pattern.
    *
    * @param pattern The compiled regular expression to match against
    * @return - returns an array of read-only HTableDescriptors
    * @throws IOException if a remote or network exception occurs
    * @see #listTables()
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #listTableDescriptors(java.util.regex.Pattern)}
    */
+  @Deprecated
   HTableDescriptor[] listTables(Pattern pattern) throws IOException;
 
   /**
+   * List all the userspace tables matching the given pattern.
+   *
+   * @param pattern The compiled regular expression to match against
+   * @return - returns a list of TableDescriptors
+   * @throws IOException if a remote or network exception occurs
+   * @see #listTables()
+   */
+  List<TableDescriptor> listTableDescriptors(Pattern pattern) throws IOException;
+
+  /**
    * List all the userspace tables matching the given regular expression.
    *
    * @param regex The regular expression to match against
-   * @return - returns an array of HTableDescriptors
+   * @return - returns an array of read-only HTableDescriptors
    * @throws IOException if a remote or network exception occurs
    * @see #listTables(java.util.regex.Pattern)
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #listTableDescriptors(java.lang.String)}
    */
+  @Deprecated
   HTableDescriptor[] listTables(String regex) throws IOException;
 
   /**
+   * List all the userspace tables matching the given regular expression.
+   *
+   * @param regex The regular expression to match against
+   * @return - returns a list of TableDescriptors
+   * @throws IOException if a remote or network exception occurs
+   * @see #listTables(java.util.regex.Pattern)
+   */
+  List<TableDescriptor> listTableDescriptors(String regex) throws IOException;
+
+  /**
    * List all the tables matching the given pattern.
    *
    * @param pattern The compiled regular expression to match against
@@ -129,23 +166,53 @@ public interface Admin extends Abortable, Closeable {
    * @return - returns an array of read-only HTableDescriptors
    * @throws IOException if a remote or network exception occurs
    * @see #listTables()
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #listTableDescriptors(java.util.regex.Pattern, boolean)}
    */
+  @Deprecated
   HTableDescriptor[] listTables(Pattern pattern, boolean includeSysTables)
       throws IOException;
 
   /**
    * List all the tables matching the given pattern.
    *
+   * @param pattern The compiled regular expression to match against
+   * @param includeSysTables False to match only against userspace tables
+   * @return - returns a list of TableDescriptors
+   * @throws IOException if a remote or network exception occurs
+   * @see #listTables()
+   */
+  List<TableDescriptor> listTableDescriptors(Pattern pattern, boolean includeSysTables)
+      throws IOException;
+
+  /**
+   * List all the tables matching the given pattern.
+   *
    * @param regex The regular expression to match against
    * @param includeSysTables False to match only against userspace tables
    * @return - returns an array of read-only HTableDescriptors
    * @throws IOException if a remote or network exception occurs
    * @see #listTables(java.util.regex.Pattern, boolean)
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #listTableDescriptors(java.lang.String, boolean)}
    */
+  @Deprecated
   HTableDescriptor[] listTables(String regex, boolean includeSysTables)
       throws IOException;
 
   /**
+   * List all the tables matching the given pattern.
+   *
+   * @param regex The regular expression to match against
+   * @param includeSysTables False to match only against userspace tables
+   * @return - returns a list of TableDescriptors
+   * @throws IOException if a remote or network exception occurs
+   * @see #listTables(java.util.regex.Pattern, boolean)
+   */
+  List<TableDescriptor> listTableDescriptors(String regex, boolean includeSysTables)
+      throws IOException;
+
+  /**
    * List all of the names of userspace tables.
    *
    * @return TableName[] table names
@@ -196,11 +263,42 @@ public interface Admin extends Abortable, Closeable {
    * @return the read-only tableDescriptor
    * @throws org.apache.hadoop.hbase.TableNotFoundException
    * @throws IOException if a remote or network exception occurs
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #listTableDescriptor(TableName)}
    */
+  @Deprecated
   HTableDescriptor getTableDescriptor(final TableName tableName)
       throws TableNotFoundException, IOException;
 
   /**
+   * Method for getting the tableDescriptor
+   *
+   * @param tableName as a {@link TableName}
+   * @return the tableDescriptor
+   * @throws org.apache.hadoop.hbase.TableNotFoundException
+   * @throws IOException if a remote or network exception occurs
+   */
+  TableDescriptor listTableDescriptor(final TableName tableName)
+      throws TableNotFoundException, IOException;
+
+  /**
+   * Creates a new table. Synchronous operation.
+   *
+   * @param desc table descriptor for table
+   * @throws IllegalArgumentException if the table name is reserved
+   * @throws org.apache.hadoop.hbase.MasterNotRunningException if master is not running
+   * @throws org.apache.hadoop.hbase.TableExistsException if table already exists (If concurrent
+   * threads, the table may have been created between test-for-existence and attempt-at-creation).
+   * @throws IOException if a remote or network exception occurs
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createTable(TableDescriptor)}
+   */
+  @Deprecated
+  default void createTable(HTableDescriptor desc) throws IOException {
+    createTable((TableDescriptor) desc);
+  }
+
+  /**
    * Creates a new table. Synchronous operation.
    *
    * @param desc table descriptor for table
@@ -210,7 +308,7 @@ public interface Admin extends Abortable, Closeable {
    * threads, the table may have been created between test-for-existence and attempt-at-creation).
    * @throws IOException if a remote or network exception occurs
    */
-  void createTable(HTableDescriptor desc) throws IOException;
+  void createTable(TableDescriptor desc) throws IOException;
 
   /**
    * Creates a new table with the specified number of regions.  The start key specified will become
@@ -228,8 +326,33 @@ public interface Admin extends Abortable, Closeable {
    * @throws org.apache.hadoop.hbase.TableExistsException if table already exists (If concurrent
    * threads, the table may have been created between test-for-existence and attempt-at-creation).
    * @throws IOException
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createTable(TableDescriptor, byte[], byte[], int)}
    */
-  void createTable(HTableDescriptor desc, byte[] startKey, byte[] endKey, int numRegions)
+  @Deprecated
+  default void createTable(HTableDescriptor desc, byte[] startKey, byte[] endKey, int numRegions)
+      throws IOException {
+    createTable((TableDescriptor) desc, startKey, endKey, numRegions);
+  }
+
+  /**
+   * Creates a new table with the specified number of regions.  The start key specified will become
+   * the end key of the first region of the table, and the end key specified will become the start
+   * key of the last region of the table (the first region has a null start key and the last region
+   * has a null end key). BigInteger math will be used to divide the key range specified into enough
+   * segments to make the required number of total regions. Synchronous operation.
+   *
+   * @param desc table descriptor for table
+   * @param startKey beginning of key range
+   * @param endKey end of key range
+   * @param numRegions the total number of regions to create
+   * @throws IllegalArgumentException if the table name is reserved
+   * @throws org.apache.hadoop.hbase.MasterNotRunningException if master is not running
+   * @throws org.apache.hadoop.hbase.TableExistsException if table already exists (If concurrent
+   * threads, the table may have been created between test-for-existence and attempt-at-creation).
+   * @throws IOException
+   */
+  void createTable(TableDescriptor desc, byte[] startKey, byte[] endKey, int numRegions)
       throws IOException;
 
   /**
@@ -245,8 +368,52 @@ public interface Admin extends Abortable, Closeable {
    * @throws org.apache.hadoop.hbase.TableExistsException if table already exists (If concurrent
    * threads, the table may have been created between test-for-existence and attempt-at-creation).
    * @throws IOException
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createTable(TableDescriptor, byte[][])}
+   */
+  @Deprecated
+  default void createTable(final HTableDescriptor desc, byte[][] splitKeys) throws IOException {
+    createTable((TableDescriptor) desc, splitKeys);
+  }
+
+  /**
+   * Creates a new table with an initial set of empty regions defined by the specified split keys.
+   * The total number of regions created will be the number of split keys plus one. Synchronous
+   * operation. Note : Avoid passing empty split key.
+   *
+   * @param desc table descriptor for table
+   * @param splitKeys array of split keys for the initial regions of the table
+   * @throws IllegalArgumentException if the table name is reserved, if the split keys are repeated
+   * and if the split key has empty byte array.
+   * @throws org.apache.hadoop.hbase.MasterNotRunningException if master is not running
+   * @throws org.apache.hadoop.hbase.TableExistsException if table already exists (If concurrent
+   * threads, the table may have been created between test-for-existence and attempt-at-creation).
+   * @throws IOException
+   */
+  void createTable(final TableDescriptor desc, byte[][] splitKeys) throws IOException;
+
+  /**
+   * Creates a new table but does not block and wait for it to come online.
+   * You can use Future.get(long, TimeUnit) to wait on the operation to complete.
+   * It may throw ExecutionException if there was an error while executing the operation
+   * or TimeoutException in case the wait timeout was not long enough to allow the
+   * operation to complete.
+   * Throws IllegalArgumentException Bad table name, if the split keys
+   *    are repeated and if the split key has empty byte array.
+   *
+   * @param desc table descriptor for table
+   * @param splitKeys keys to check if the table has been created with all split keys
+   * @throws IOException if a remote or network exception occurs
+   * @return the result of the async creation. You can use Future.get(long, TimeUnit)
+   *    to wait on the operation to complete.
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #createTableAsync(TableDescriptor, byte[][])}
    */
-  void createTable(final HTableDescriptor desc, byte[][] splitKeys) throws IOException;
+  @Deprecated
+  default Future<Void> createTableAsync(final HTableDescriptor desc, final byte[][] splitKeys)
+      throws IOException {
+    return createTableAsync((TableDescriptor) desc, splitKeys);
+  }
 
   /**
    * Creates a new table but does not block and wait for it to come online.
@@ -263,7 +430,7 @@ public interface Admin extends Abortable, Closeable {
    * @return the result of the async creation. You can use Future.get(long, TimeUnit)
    *    to wait on the operation to complete.
    */
-  Future<Void> createTableAsync(final HTableDescriptor desc, final byte[][] splitKeys)
+  Future<Void> createTableAsync(final TableDescriptor desc, final byte[][] splitKeys)
       throws IOException;
 
   /**
@@ -291,7 +458,8 @@ public interface Admin extends Abortable, Closeable {
   /**
    * Deletes tables matching the passed in pattern and wait on completion. Warning: Use this method
    * carefully, there is no prompting and the effect is immediate. Consider using {@link
-   * #listTables(java.lang.String)} and {@link #deleteTable(org.apache.hadoop.hbase.TableName)}
+   * #listTableDescriptors(java.lang.String)}
+   * and {@link #deleteTable(org.apache.hadoop.hbase.TableName)}
    *
    * @param regex The regular expression to match table names against
    * @return Table descriptors for tables that couldn't be deleted.
@@ -299,20 +467,30 @@ public interface Admin extends Abortable, Closeable {
    * @throws IOException
    * @see #deleteTables(java.util.regex.Pattern)
    * @see #deleteTable(org.apache.hadoop.hbase.TableName)
+   * @deprecated since 2.0 version and will be removed in 3.0 version
+   *             This is just a trivial helper method without any magic.
+   *             Consider using {@link #listTableDescriptors(java.lang.String)}
+   *             and {@link #deleteTable(org.apache.hadoop.hbase.TableName)}
    */
+  @Deprecated
   HTableDescriptor[] deleteTables(String regex) throws IOException;
 
   /**
    * Delete tables matching the passed in pattern and wait on completion. Warning: Use this method
    * carefully, there is no prompting and the effect is immediate. Consider using {@link
-   * #listTables(java.util.regex.Pattern) } and
+   * #listTableDescriptors(java.util.regex.Pattern)} and
    * {@link #deleteTable(org.apache.hadoop.hbase.TableName)}
    *
    * @param pattern The pattern to match table names against
    * @return Table descriptors for tables that couldn't be deleted
    *         The return htds are read-only
    * @throws IOException
+   * @deprecated since 2.0 version and will be removed in 3.0 version
+   *             This is just a trivial helper method without any magic.
+   *             Consider using {@link #listTableDescriptors(java.util.regex.Pattern)}
+   *             and {@link #deleteTable(org.apache.hadoop.hbase.TableName)}
    */
+  @Deprecated
   HTableDescriptor[] deleteTables(Pattern pattern) throws IOException;
 
   /**
@@ -372,7 +550,7 @@ public interface Admin extends Abortable, Closeable {
   /**
    * Enable tables matching the passed in pattern and wait on completion. Warning: Use this method
    * carefully, there is no prompting and the effect is immediate. Consider using {@link
-   * #listTables(java.lang.String)} and {@link #enableTable(org.apache.hadoop.hbase.TableName)}
+   * #listTableDescriptors(java.lang.String)} and {@link #enableTable(org.apache.hadoop.hbase.TableName)}
    *
    * @param regex The regular expression to match table names against
    * @throws IOException
@@ -380,20 +558,30 @@ public interface Admin extends Abortable, Closeable {
    *         The return HTDs are read-only.
    * @see #enableTables(java.util.regex.Pattern)
    * @see #enableTable(org.apache.hadoop.hbase.TableName)
+   * @deprecated since 2.0 version and will be removed in 3.0 version
+   *             This is just a trivial helper method without any magic.
+   *             Consider using {@link #listTableDescriptors(java.lang.String)}
+   *             and {@link #enableTable(org.apache.hadoop.hbase.TableName)}
    */
+  @Deprecated
   HTableDescriptor[] enableTables(String regex) throws IOException;
 
   /**
    * Enable tables matching the passed in pattern and wait on completion. Warning: Use this method
    * carefully, there is no prompting and the effect is immediate. Consider using {@link
-   * #listTables(java.util.regex.Pattern) } and
+   * #listTableDescriptors(java.util.regex.Pattern)} and
    * {@link #enableTable(org.apache.hadoop.hbase.TableName)}
    *
    * @param pattern The pattern to match table names against
    * @throws IOException
    * @return Table descriptors for tables that couldn't be enabled.
    *         The return HTDs are read-only.
+   * @deprecated since 2.0 version and will be removed in 3.0 version
+   *             This is just a trivial helper method without any magic.
+   *             Consider using {@link #listTableDescriptors(java.util.regex.Pattern)}
+   *             and {@link #enableTable(org.apache.hadoop.hbase.TableName)}
    */
+  @Deprecated
   HTableDescriptor[] enableTables(Pattern pattern) throws IOException;
 
   /**
@@ -425,7 +613,8 @@ public interface Admin extends Abortable, Closeable {
   /**
    * Disable tables matching the passed in pattern and wait on completion. Warning: Use this method
    * carefully, there is no prompting and the effect is immediate. Consider using {@link
-   * #listTables(java.lang.String)} and {@link #disableTable(org.apache.hadoop.hbase.TableName)}
+   * #listTableDescriptors(java.lang.String)}
+   * and {@link #disableTable(org.apache.hadoop.hbase.TableName)}
    *
    * @param regex The regular expression to match table names against
    * @return Table descriptors for tables that couldn't be disabled
@@ -433,20 +622,30 @@ public interface Admin extends Abortable, Closeable {
    * @throws IOException
    * @see #disableTables(java.util.regex.Pattern)
    * @see #disableTable(org.apache.hadoop.hbase.TableName)
+   * @deprecated since 2.0 version and will be removed in 3.0 version
+   *             This is just a trivial helper method without any magic.
+   *             Consider using {@link #listTableDescriptors(java.lang.String)}
+   *             and {@link #disableTable(org.apache.hadoop.hbase.TableName)}
    */
+  @Deprecated
   HTableDescriptor[] disableTables(String regex) throws IOException;
 
   /**
    * Disable tables matching the passed in pattern and wait on completion. Warning: Use this method
    * carefully, there is no prompting and the effect is immediate. Consider using {@link
-   * #listTables(java.util.regex.Pattern) } and
+   * #listTableDescriptors(java.util.regex.Pattern)} and
    * {@link #disableTable(org.apache.hadoop.hbase.TableName)}
    *
    * @param pattern The pattern to match table names against
    * @return Table descriptors for tables that couldn't be disabled
    *         The return htds are read-only
    * @throws IOException
+   * @deprecated since 2.0 version and will be removed in 3.0 version
+   *             This is just a trivial helper method without any magic.
+   *             Consider using {@link #listTableDescriptors(java.util.regex.Pattern)}
+   *             and {@link #disableTable(org.apache.hadoop.hbase.TableName)}
    */
+  @Deprecated
   HTableDescriptor[] disableTables(Pattern pattern) throws IOException;
 
   /**
@@ -1014,11 +1213,22 @@ public interface Admin extends Abortable, Closeable {
    * @param tableName name of table.
    * @param htd modified description of the table
    * @throws IOException if a remote or network exception occurs
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #modifyTable(TableDescriptor)}
    */
+  @Deprecated
   void modifyTable(final TableName tableName, final HTableDescriptor htd)
       throws IOException;
 
   /**
+   * Modify an existing table, more IRB friendly version.
+   *
+   * @param td modified description of the table
+   * @throws IOException if a remote or network exception occurs
+   */
+  void modifyTable(final TableDescriptor td) throws IOException;
+
+  /**
    * Modify an existing table, more IRB friendly version. Asynchronous operation.  This means that
    * it may be a while before your schema change is updated across all of the table.
    * You can use Future.get(long, TimeUnit) to wait on the operation to complete.
@@ -1031,11 +1241,30 @@ public interface Admin extends Abortable, Closeable {
    * @throws IOException if a remote or network exception occurs
    * @return the result of the async modify. You can use Future.get(long, TimeUnit) to wait on the
    *     operation to complete
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #modifyTableAsync(TableDescriptor)}
    */
+  @Deprecated
   Future<Void> modifyTableAsync(final TableName tableName, final HTableDescriptor htd)
       throws IOException;
 
   /**
+   * Modify an existing table, more IRB friendly version. Asynchronous operation.  This means that
+   * it may be a while before your schema change is updated across all of the table.
+   * You can use Future.get(long, TimeUnit) to wait on the operation to complete.
+   * It may throw ExecutionException if there was an error while executing the operation
+   * or TimeoutException in case the wait timeout was not long enough to allow the
+   * operation to complete.
+   *
+   * @param td description of the table
+   * @throws IOException if a remote or network exception occurs
+   * @return the result of the async modify. You can use Future.get(long, TimeUnit) to wait on the
+   *     operation to complete
+   */
+  Future<Void> modifyTableAsync(TableDescriptor td)
+      throws IOException;
+
+  /**
    * Shuts down the HBase cluster
    *
    * @throws IOException if a remote or network exception occurs
@@ -1177,11 +1406,24 @@ public interface Admin extends Abortable, Closeable {
    * @param name namespace name
    * @return HTD[] the read-only tableDescriptors
    * @throws IOException
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #listTableDescriptorsByNamespace(byte[])}
    */
+  @Deprecated
   HTableDescriptor[] listTableDescriptorsByNamespace(final String name)
       throws IOException;
 
   /**
+   * Get list of table descriptors by namespace
+   *
+   * @param name namespace name
+   * @return returns a list of TableDescriptors
+   * @throws IOException
+   */
+  List<TableDescriptor> listTableDescriptorsByNamespace(final byte[] name)
+      throws IOException;
+
+  /**
    * Get list of table names by namespace
    *
    * @param name namespace name
@@ -1210,17 +1452,33 @@ public interface Admin extends Abortable, Closeable {
    * @param tableNames List of table names
    * @return HTD[] the read-only tableDescriptors
    * @throws IOException if a remote or network exception occurs
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #listTableDescriptors(List)}
    */
+  @Deprecated
   HTableDescriptor[] getTableDescriptorsByTableName(List<TableName> tableNames)
     throws IOException;
 
   /**
    * Get tableDescriptors
    *
+   * @param tableNames List of table names
+   * @return returns a list of TableDescriptors
+   * @throws IOException if a remote or network exception occurs
+   */
+  List<TableDescriptor> listTableDescriptors(List<TableName> tableNames)
+    throws IOException;
+
+  /**
+   * Get tableDescriptors
+   *
    * @param names List of table names
    * @return HTD[] the read-only tableDescriptors
    * @throws IOException if a remote or network exception occurs
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #listTableDescriptors(List)}
    */
+  @Deprecated
   HTableDescriptor[] getTableDescriptors(List<String> names)
     throws IOException;
 

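Taken together, the methods added above cover the name-to-descriptor round trip without touching HTableDescriptor. A rough usage sketch against the new signatures (the timeout is illustrative):

  import java.util.concurrent.Future;
  import java.util.concurrent.TimeUnit;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.TableDescriptor;

  public class ModifyTableSketch {
    static void roundTrip(Admin admin, TableName name) throws Exception {
      // Replaces the deprecated getTableDescriptor(TableName).
      TableDescriptor td = admin.listTableDescriptor(name);
      // The table name now comes from the descriptor itself.
      Future<Void> f = admin.modifyTableAsync(td);
      f.get(30, TimeUnit.SECONDS); // wait for the schema change to complete
    }
  }
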
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 1c6ea03..7518b9c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -206,6 +206,7 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Message;
 import com.google.protobuf.RpcController;
+import java.util.stream.Collectors;
 
 /**
  * HBaseAdmin is no longer a client API. It is marked InterfaceAudience.Private indicating that
@@ -307,6 +308,96 @@ public class HBaseAdmin implements Admin {
     return new AbortProcedureFuture(this, procId, abortProcResponse);
   }
 
+  @Override
+  public List<TableDescriptor> listTableDescriptors() throws IOException {
+    return listTableDescriptors((Pattern)null, false);
+  }
+
+  @Override
+  public List<TableDescriptor> listTableDescriptors(Pattern pattern) throws IOException {
+    return listTableDescriptors(pattern, false);
+  }
+
+  @Override
+  public List<TableDescriptor> listTableDescriptors(String regex) throws IOException {
+    return listTableDescriptors(Pattern.compile(regex), false);
+  }
+
+  @Override
+  public List<TableDescriptor> listTableDescriptors(Pattern pattern, boolean includeSysTables) throws IOException {
+    return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(),
+        getRpcControllerFactory()) {
+      @Override
+      protected List<TableDescriptor> rpcCall() throws Exception {
+        GetTableDescriptorsRequest req =
+            RequestConverter.buildGetTableDescriptorsRequest(pattern, includeSysTables);
+        return ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(),
+            req));
+      }
+    });
+  }
+
+  @Override
+  public List<TableDescriptor> listTableDescriptors(String regex, boolean includeSysTables) throws IOException {
+    return listTableDescriptors(Pattern.compile(regex), includeSysTables);
+  }
+
+  @Override
+  public TableDescriptor listTableDescriptor(TableName tableName) throws TableNotFoundException, IOException {
+    return getTableDescriptor(tableName, getConnection(), rpcCallerFactory, rpcControllerFactory,
+       operationTimeout, rpcTimeout);
+  }
+
+  @Override
+  public void modifyTable(TableDescriptor td) throws IOException {
+    get(modifyTableAsync(td), syncWaitTimeout, TimeUnit.MILLISECONDS);
+  }
+
+  @Override
+  public Future<Void> modifyTableAsync(TableDescriptor td) throws IOException {
+    ModifyTableResponse response = executeCallable(
+      new MasterCallable<ModifyTableResponse>(getConnection(), getRpcControllerFactory()) {
+        @Override
+        protected ModifyTableResponse rpcCall() throws Exception {
+          setPriority(td.getTableName());
+          ModifyTableRequest request = RequestConverter.buildModifyTableRequest(
+            td.getTableName(), td, ng.getNonceGroup(), ng.newNonce());
+          return master.modifyTable(getRpcController(), request);
+        }
+      });
+    return new ModifyTableFuture(this, td.getTableName(), response);
+  }
+
+  @Override
+  public List<TableDescriptor> listTableDescriptorsByNamespace(byte[] name) throws IOException {
+    return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(),
+        getRpcControllerFactory()) {
+      @Override
+      protected List<TableDescriptor> rpcCall() throws Exception {
+        return master.listTableDescriptorsByNamespace(getRpcController(),
+                ListTableDescriptorsByNamespaceRequest.newBuilder()
+                  .setNamespaceName(Bytes.toString(name)).build())
+                .getTableSchemaList()
+                .stream()
+                .map(ProtobufUtil::convertToTableDesc)
+                .collect(Collectors.toList());
+      }
+    });
+  }
+
+  @Override
+  public List<TableDescriptor> listTableDescriptors(List<TableName> tableNames) throws IOException {
+    return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(),
+        getRpcControllerFactory()) {
+      @Override
+      protected List<TableDescriptor> rpcCall() throws Exception {
+        GetTableDescriptorsRequest req =
+            RequestConverter.buildGetTableDescriptorsRequest(tableNames);
+          return ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(), req));
+      }
+    });
+  }
+
   private static class AbortProcedureFuture extends ProcedureFuture<Boolean> {
     private boolean isAbortInProgress;
 
@@ -419,11 +510,40 @@ public class HBaseAdmin implements Admin {
 
   @Override
   public HTableDescriptor getTableDescriptor(final TableName tableName) throws IOException {
-    return getTableDescriptor(tableName, getConnection(), rpcCallerFactory, rpcControllerFactory,
+    return getHTableDescriptor(tableName, getConnection(), rpcCallerFactory, rpcControllerFactory,
        operationTimeout, rpcTimeout);
   }
 
-  static HTableDescriptor getTableDescriptor(final TableName tableName, Connection connection,
+  static TableDescriptor getTableDescriptor(final TableName tableName, Connection connection,
+      RpcRetryingCallerFactory rpcCallerFactory, final RpcControllerFactory rpcControllerFactory,
+      int operationTimeout, int rpcTimeout) throws IOException {
+    if (tableName == null) return null;
+    TableDescriptor td =
+        executeCallable(new MasterCallable<TableDescriptor>(connection, rpcControllerFactory) {
+      @Override
+      protected TableDescriptor rpcCall() throws Exception {
+        GetTableDescriptorsRequest req =
+            RequestConverter.buildGetTableDescriptorsRequest(tableName);
+        GetTableDescriptorsResponse htds = master.getTableDescriptors(getRpcController(), req);
+        if (!htds.getTableSchemaList().isEmpty()) {
+          return ProtobufUtil.convertToTableDesc(htds.getTableSchemaList().get(0));
+        }
+        return null;
+      }
+    }, rpcCallerFactory, operationTimeout, rpcTimeout);
+    if (td != null) {
+      return td;
+    }
+    throw new TableNotFoundException(tableName.getNameAsString());
+  }
+
+  /**
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #getTableDescriptor(TableName,
+   *             Connection, RpcRetryingCallerFactory,RpcControllerFactory,int,int)}
+   */
+  @Deprecated
+  static HTableDescriptor getHTableDescriptor(final TableName tableName, Connection connection,
       RpcRetryingCallerFactory rpcCallerFactory, final RpcControllerFactory rpcControllerFactory,
       int operationTimeout, int rpcTimeout) throws IOException {
     if (tableName == null) return null;
@@ -455,13 +575,13 @@ public class HBaseAdmin implements Admin {
   }
 
   @Override
-  public void createTable(HTableDescriptor desc)
+  public void createTable(TableDescriptor desc)
   throws IOException {
     createTable(desc, null);
   }
 
   @Override
-  public void createTable(HTableDescriptor desc, byte [] startKey,
+  public void createTable(TableDescriptor desc, byte [] startKey,
       byte [] endKey, int numRegions)
   throws IOException {
     if(numRegions < 3) {
@@ -481,13 +601,13 @@ public class HBaseAdmin implements Admin {
   }
 
   @Override
-  public void createTable(final HTableDescriptor desc, byte [][] splitKeys)
+  public void createTable(final TableDescriptor desc, byte [][] splitKeys)
       throws IOException {
     get(createTableAsync(desc, splitKeys), syncWaitTimeout, TimeUnit.MILLISECONDS);
   }
 
   @Override
-  public Future<Void> createTableAsync(final HTableDescriptor desc, final byte[][] splitKeys)
+  public Future<Void> createTableAsync(final TableDescriptor desc, final byte[][] splitKeys)
       throws IOException {
     if (desc.getTableName() == null) {
       throw new IllegalArgumentException("TableName cannot be null");
@@ -524,19 +644,19 @@ public class HBaseAdmin implements Admin {
   }
 
   private static class CreateTableFuture extends TableFuture<Void> {
-    private final HTableDescriptor desc;
+    private final TableDescriptor desc;
     private final byte[][] splitKeys;
 
-    public CreateTableFuture(final HBaseAdmin admin, final HTableDescriptor desc,
+    public CreateTableFuture(final HBaseAdmin admin, final TableDescriptor desc,
         final byte[][] splitKeys, final CreateTableResponse response) {
       super(admin, desc.getTableName(),
               (response != null && response.hasProcId()) ? response.getProcId() : null);
       this.splitKeys = splitKeys;
-      this.desc = new ImmutableHTableDescriptor(desc);
+      this.desc = desc;
     }
 
     @Override
-    protected HTableDescriptor getTableDescriptor() {
+    protected TableDescriptor getTableDescriptor() {
       return desc;
     }
 
@@ -3546,7 +3666,7 @@ public class HBaseAdmin implements Admin {
     /**
      * @return the table descriptor
      */
-    protected HTableDescriptor getTableDescriptor() throws IOException {
+    protected TableDescriptor getTableDescriptor() throws IOException {
       return getAdmin().getTableDescriptorByTableName(getTableName());
     }
 
@@ -3642,7 +3762,7 @@ public class HBaseAdmin implements Admin {
 
     protected void waitForAllRegionsOnline(final long deadlineTs, final byte[][] splitKeys)
         throws IOException, TimeoutException {
-      final HTableDescriptor desc = getTableDescriptor();
+      final TableDescriptor desc = getTableDescriptor();
       final AtomicInteger actualRegCount = new AtomicInteger(0);
       final MetaTableAccessor.Visitor visitor = new MetaTableAccessor.Visitor() {
         @Override

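As the implementation above shows, the synchronous createTable() simply blocks on the Future returned by createTableAsync(). A minimal pre-split creation sketch using the TableDescriptor overload (split keys and timeout are illustrative):

  import java.util.concurrent.Future;
  import java.util.concurrent.TimeUnit;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.TableDescriptor;
  import org.apache.hadoop.hbase.util.Bytes;

  public class PreSplitCreateSketch {
    static void createPreSplit(Admin admin, TableDescriptor td) throws Exception {
      // Three regions: (-inf,"g"), ["g","p"), ["p",+inf).
      byte[][] splitKeys = { Bytes.toBytes("g"), Bytes.toBytes("p") };
      Future<Void> f = admin.createTableAsync(td, splitKeys);
      f.get(60, TimeUnit.SECONDS);
    }
  }
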
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index d207f6a..46ce902 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -263,7 +263,17 @@ public class HTable implements Table {
    */
   @Override
   public HTableDescriptor getTableDescriptor() throws IOException {
-    HTableDescriptor htd = HBaseAdmin.getTableDescriptor(tableName, connection, rpcCallerFactory,
+    HTableDescriptor htd = HBaseAdmin.getHTableDescriptor(tableName, connection, rpcCallerFactory,
+      rpcControllerFactory, operationTimeout, readRpcTimeout);
+    if (htd != null) {
+      return new ImmutableHTableDescriptor(htd);
+    }
+    return null;
+  }
+
+  @Override
+  public TableDescriptor getDescriptor() throws IOException {
+    HTableDescriptor htd = HBaseAdmin.getHTableDescriptor(tableName, connection, rpcCallerFactory,
       rpcControllerFactory, operationTimeout, readRpcTimeout);
     if (htd != null) {
       return new ImmutableHTableDescriptor(htd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
index 933329f..c76c2f5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
@@ -65,10 +65,19 @@ public interface Table extends Closeable {
   /**
    * Gets the {@link org.apache.hadoop.hbase.HTableDescriptor table descriptor} for this table.
    * @throws java.io.IOException if a remote or network exception occurs.
+   * @deprecated since 2.0 version and will be removed in 3.0 version.
+   *             use {@link #getDescriptor()}
    */
+  @Deprecated
   HTableDescriptor getTableDescriptor() throws IOException;
 
   /**
+   * Gets the {@link org.apache.hadoop.hbase.client.TableDescriptor table descriptor} for this table.
+   * @throws java.io.IOException if a remote or network exception occurs.
+   */
+  TableDescriptor getDescriptor() throws IOException;
+
+  /**
    * Test for the existence of columns in the table, as specified by the Get.
    * <p>
    *
@@ -604,7 +613,7 @@ public interface Table extends Closeable {
   /**
    * Get timeout (millisecond) of each rpc request in this Table instance.
    *
-   * @returns Currently configured read timeout
+   * @return Currently configured read timeout
    * @deprecated Use getReadRpcTimeout or getWriteRpcTimeout instead
    */
   @Deprecated

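Client code migrates by swapping the deprecated getTableDescriptor() for the new accessor; a small sketch:

  import java.io.IOException;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.Table;
  import org.apache.hadoop.hbase.client.TableDescriptor;

  public class DescriptorLookupSketch {
    static void describe(Connection conn, TableName name) throws IOException {
      try (Table table = conn.getTable(name)) {
        // Returns the interface type instead of HTableDescriptor.
        TableDescriptor td = table.getDescriptor();
        System.out.println(td.getTableName());
      }
    }
  }
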
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
index 33e896c..65e64f8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
@@ -208,6 +208,14 @@ public interface TableDescriptor {
   byte[] getValue(byte[] key);
 
   /**
+   * Getter for accessing the metadata associated with the key.
+   *
+   * @param key The key.
+   * @return Null if no mapping for the key
+   */
+  String getValue(String key);
+
+  /**
    * @return Getter for fetching an unmodifiable map.
    */
   Map<Bytes, Bytes> getValues();

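The String overload mirrors the byte[] variant: both return null when the key has no mapping. A one-line sketch (the "OWNER" key is illustrative, not defined by this patch):

  import org.apache.hadoop.hbase.client.TableDescriptor;

  public class MetadataLookupSketch {
    static String owner(TableDescriptor td) {
      return td.getValue("OWNER"); // null if the key is unset
    }
  }
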
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
index 7a90a71..44d5c99 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
@@ -500,13 +500,20 @@ public class TableDescriptorBuilder {
 
     @Override
     public Bytes getValue(Bytes key) {
-      return values.get(key);
+      Bytes rval = values.get(key);
+      return rval == null ? null : new Bytes(rval.copyBytes());
+    }
+
+    @Override
+    public String getValue(String key) {
+      Bytes rval = values.get(new Bytes(Bytes.toBytes(key)));
+      return rval == null ? null : Bytes.toString(rval.get(), rval.getOffset(), rval.getLength());
     }
 
     @Override
     public byte[] getValue(byte[] key) {
       Bytes value = values.get(new Bytes(key));
-      return value == null ? null : value.get();
+      return value == null ? null : value.copyBytes();
     }
 
     private <T> T getOrDefault(Bytes key, Function<String, T> function, T defaultValue) {

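Note that getValue(Bytes) and getValue(byte[]) above now return defensive copies (copyBytes()), so callers can no longer mutate the descriptor's internal map through the returned arrays. For completeness, a hedged sketch of building a descriptor; builder method naming was still settling during the 2.0 alphas, so setColumnFamily versus addColumnFamily is an assumption here:

  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
  import org.apache.hadoop.hbase.client.TableDescriptor;
  import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

  public class BuildDescriptorSketch {
    static TableDescriptor build() {
      return TableDescriptorBuilder.newBuilder(TableName.valueOf("t1")) // "t1" is illustrative
          // May be addColumnFamily(...) on contemporary 2.0 builds.
          .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f"))
          .build();
    }
  }
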
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
index c7e0be7..6a57036 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.io.crypto.Cipher;
 import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
@@ -182,7 +183,7 @@ public final class EncryptionUtil {
    * @throws IOException if an encryption key for the column cannot be unwrapped
    */
   public static Encryption.Context createEncryptionContext(Configuration conf,
-    HColumnDescriptor family) throws IOException {
+    ColumnFamilyDescriptor family) throws IOException {
     Encryption.Context cryptoContext = Encryption.Context.NONE;
     String cipherName = family.getEncryptionType();
     if (cipherName != null) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
index a74d737..5c44b4e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
@@ -1305,17 +1305,17 @@ public final class RequestConverter {
    * Creates a protocol buffer ModifyTableRequest
    *
    * @param tableName
-   * @param hTableDesc
+   * @param tableDesc
    * @return a ModifyTableRequest
    */
   public static ModifyTableRequest buildModifyTableRequest(
       final TableName tableName,
-      final HTableDescriptor hTableDesc,
+      final TableDescriptor tableDesc,
       final long nonceGroup,
       final long nonce) {
     ModifyTableRequest.Builder builder = ModifyTableRequest.newBuilder();
     builder.setTableName(ProtobufUtil.toProtoTableName((tableName)));
-    builder.setTableSchema(ProtobufUtil.convertToTableSchema(hTableDesc));
+    builder.setTableSchema(ProtobufUtil.convertToTableSchema(tableDesc));
     builder.setNonceGroup(nonceGroup);
     builder.setNonce(nonce);
     return builder.build();

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
index b7df9b4..35f85f7 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
@@ -182,7 +182,7 @@ public class ZooKeeperScanPolicyObserver implements RegionObserver {
     }
     long ttl = Math.max(EnvironmentEdgeManager.currentTime() -
         Bytes.toLong(data), oldSI.getTtl());
-    return new ScanInfo(oldSI.getConfiguration(), store.getFamily(), ttl,
+    return new ScanInfo(oldSI.getConfiguration(), store.getColumnFamilyDescriptor(), ttl,
         oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
   }
 

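Third-party coprocessors make the same swap as this example: Store.getFamily() becomes Store.getColumnFamilyDescriptor(). A minimal sketch (the TTL lookup is illustrative):

  import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
  import org.apache.hadoop.hbase.regionserver.Store;

  public class StoreFamilySketch {
    static int ttlOf(Store store) {
      // Interface-typed replacement for the HColumnDescriptor-returning getFamily().
      ColumnFamilyDescriptor family = store.getColumnFamilyDescriptor();
      return family.getTimeToLive();
    }
  }
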
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
index 5012a5a..63dfcaa 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
@@ -508,6 +509,11 @@ public class RemoteHTable implements Table {
     // no-op
   }
 
+  @Override
+  public TableDescriptor getDescriptor() throws IOException {
+    return getTableDescriptor();
+  }
+
   class Scanner implements ResultScanner {
 
     String uri;

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java
index 604e502..a07868d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java
@@ -126,7 +126,7 @@ public class HFileSplitterJob extends Configured implements Tool {
       try (Connection conn = ConnectionFactory.createConnection(conf);
           Table table = conn.getTable(tableName);
           RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
-        HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator);
+        HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(), regionLocator);
       }
       LOG.debug("success configuring load incremental job");
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
index 051a8f2..8824872 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
@@ -220,6 +220,11 @@ public final class HTableWrapper implements Table {
   }
 
   @Override
+  public TableDescriptor getDescriptor() throws IOException {
+    return table.getDescriptor();
+  }
+
+  @Override
   public TableName getName() {
     return table.getName();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
index f217641..b836082 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
@@ -67,7 +68,7 @@ public class ConstraintProcessor implements RegionObserver {
           "Constraints only act on regions - started in an environment that 
was not a region");
     }
     RegionCoprocessorEnvironment env = (RegionCoprocessorEnvironment) 
environment;
-    HTableDescriptor desc = env.getRegion().getTableDesc();
+    TableDescriptor desc = env.getRegion().getTableDescriptor();
     // load all the constraints from the HTD
     try {
       this.constraints = Constraints.getConstraints(desc, classloader);

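A minimal observer sketch of the pattern this hunk adopts — Region#getTableDescriptor() in place of the removed getTableDesc(). The class name is illustrative, and depending on the exact branch state further Coprocessor methods may need implementing:

import java.io.IOException;

import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;

public class DescriptorAwareObserver implements RegionObserver {
  private TableDescriptor desc;

  @Override
  public void start(CoprocessorEnvironment environment) throws IOException {
    if (!(environment instanceof RegionCoprocessorEnvironment)) {
      throw new IllegalArgumentException("started outside a region environment");
    }
    RegionCoprocessorEnvironment env = (RegionCoprocessorEnvironment) environment;
    // getTableDescriptor() returns the read-only interface, not an HTD.
    this.desc = env.getRegion().getTableDescriptor();
  }
}
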
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
index 5ed9aa8..203442a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
@@ -34,6 +34,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 
@@ -555,7 +556,7 @@ public final class Constraints {
    * @throws IOException
    *           if any part of reading/arguments fails
    */
-  static List<? extends Constraint> getConstraints(HTableDescriptor desc,
+  static List<? extends Constraint> getConstraints(TableDescriptor desc,
       ClassLoader classloader) throws IOException {
     List<Constraint> constraints = new ArrayList<>();
     // loop through all the key, values looking for constraints

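Widening the parameter from the concrete HTableDescriptor to the TableDescriptor interface keeps existing callers compiling, since (on my reading of branch-2) HTableDescriptor itself implements TableDescriptor. A toy sketch of the idea, with illustrative names:

import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class WidenedParameter {
  // Accepting the interface admits legacy and builder-made descriptors alike.
  static String describe(TableDescriptor desc) {
    return desc.getTableName() + ": " + desc.getColumnFamilies().length + " families";
  }

  public static void main(String[] args) {
    // Assumption: HTableDescriptor implements TableDescriptor on this branch,
    // so a legacy descriptor still satisfies the widened signature.
    HTableDescriptor legacy = new HTableDescriptor(TableName.valueOf("demo"));
    System.out.println(describe(legacy));
  }
}
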
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
index c0d809b..21b8556 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
@@ -160,7 +160,7 @@ public class CopyTable extends Configured implements Tool {
       try (Connection conn = ConnectionFactory.createConnection(getConf());
           Admin admin = conn.getAdmin()) {
         HFileOutputFormat2.configureIncrementalLoadMap(job,
-            admin.getTableDescriptor((TableName.valueOf(dstTableName))));
+            admin.listTableDescriptor((TableName.valueOf(dstTableName))));
       }
     } else {
       TableMapReduceUtil.initTableMapperJob(tableName, scan,

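A short sketch of the Admin-side replacement CopyTable now uses (class name and table "dst" are placeholders):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class AdminDescriptorLookup {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin()) {
      // listTableDescriptor(TableName) returns a TableDescriptor, replacing
      // the HTableDescriptor-returning getTableDescriptor(TableName).
      TableDescriptor td = admin.listTableDescriptor(TableName.valueOf("dst"));
      System.out.println(td.getTableName());
    }
  }
}
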
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index f847608..56af23a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -25,7 +25,6 @@ import java.net.URLEncoder;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -44,20 +43,19 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.fs.HFileSystem;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
@@ -67,10 +65,13 @@ import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.io.hfile.HFileWriterImpl;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -94,23 +95,33 @@ import com.google.common.annotations.VisibleForTesting;
  * all HFiles being written.
  * <p>
  * Using this class as part of a MapReduce job is best done
- * using {@link #configureIncrementalLoad(Job, HTableDescriptor, RegionLocator)}.
+ * using {@link #configureIncrementalLoad(Job, TableDescriptor, RegionLocator)}.
  */
 @InterfaceAudience.Public
 public class HFileOutputFormat2
     extends FileOutputFormat<ImmutableBytesWritable, Cell> {
   private static final Log LOG = LogFactory.getLog(HFileOutputFormat2.class);
   static class TableInfo {
-    private HTableDescriptor hTableDescriptor;
+    private TableDescriptor tableDescriptor;
     private RegionLocator regionLocator;
 
-    public TableInfo(HTableDescriptor hTableDescriptor, RegionLocator regionLocator) {
-      this.hTableDescriptor = hTableDescriptor;
+    public TableInfo(TableDescriptor tableDescriptor, RegionLocator regionLocator) {
+      this.tableDescriptor = tableDescriptor;
       this.regionLocator = regionLocator;
     }
 
+    /**
+     * Modifications to the returned HTD do not affect the inner TD.
+     * @return A clone of the inner table descriptor
+     * @deprecated use {@link #getTableDescriptor}
+     */
+    @Deprecated
     public HTableDescriptor getHTableDescriptor() {
-      return hTableDescriptor;
+      return new HTableDescriptor(tableDescriptor);
+    }
+
+    public TableDescriptor getTableDescriptor() {
+      return tableDescriptor;
     }
 
     public RegionLocator getRegionLocator() {
@@ -539,7 +550,7 @@ public class HFileOutputFormat2
    */
   public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator)
       throws IOException {
-    configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator);
+    configureIncrementalLoad(job, table.getDescriptor(), regionLocator);
   }
 
   /**
@@ -556,7 +567,7 @@ public class HFileOutputFormat2
   * The user should be sure to set the map output value class to either KeyValue or Put before
    * running this function.
    */
-  public static void configureIncrementalLoad(Job job, HTableDescriptor tableDescriptor,
+  public static void configureIncrementalLoad(Job job, TableDescriptor tableDescriptor,
       RegionLocator regionLocator) throws IOException {
     ArrayList<TableInfo> singleTableInfo = new ArrayList<>();
     singleTableInfo.add(new TableInfo(tableDescriptor, regionLocator));
@@ -601,13 +612,13 @@ public class HFileOutputFormat2
     /* Now get the region start keys for every table required */
     List<String> allTableNames = new ArrayList<>(multiTableInfo.size());
     List<RegionLocator> regionLocators = new ArrayList<>( multiTableInfo.size());
-    List<HTableDescriptor> tableDescriptors = new ArrayList<>( multiTableInfo.size());
+    List<TableDescriptor> tableDescriptors = new ArrayList<>( multiTableInfo.size());
 
     for( TableInfo tableInfo : multiTableInfo )
     {
       regionLocators.add(tableInfo.getRegionLocator());
       allTableNames.add(tableInfo.getRegionLocator().getName().getNameAsString());
-      tableDescriptors.add(tableInfo.getHTableDescriptor());
+      tableDescriptors.add(tableInfo.getTableDescriptor());
     }
     // Record tablenames for creating writer by favored nodes, and decoding compression, block size and other attributes of columnfamily per table
     conf.set(OUTPUT_TABLE_NAME_CONF_KEY, StringUtils.join(allTableNames, Bytes
@@ -635,7 +646,7 @@ public class HFileOutputFormat2
     LOG.info("Incremental output configured for tables: " + 
StringUtils.join(allTableNames, ","));
   }
 
-  public static void configureIncrementalLoadMap(Job job, HTableDescriptor tableDescriptor) throws
+  public static void configureIncrementalLoadMap(Job job, TableDescriptor tableDescriptor) throws
       IOException {
     Configuration conf = job.getConfiguration();
 
@@ -643,10 +654,10 @@ public class HFileOutputFormat2
     job.setOutputValueClass(KeyValue.class);
     job.setOutputFormatClass(HFileOutputFormat2.class);
 
-    ArrayList<HTableDescriptor> singleTableDescriptor = new ArrayList<>(1);
+    ArrayList<TableDescriptor> singleTableDescriptor = new ArrayList<>(1);
     singleTableDescriptor.add(tableDescriptor);
 
-    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, tableDescriptor.getNameAsString());
+    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, tableDescriptor.getTableName().getNameAsString());
     // Set compression algorithms based on column families
     conf.set(COMPRESSION_FAMILIES_CONF_KEY,
         serializeColumnFamilyAttribute(compressionDetails, singleTableDescriptor));
@@ -793,18 +804,17 @@ public class HFileOutputFormat2
 
   @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
   @VisibleForTesting
-  static String serializeColumnFamilyAttribute(Function<HColumnDescriptor, String> fn, List<HTableDescriptor> allTables)
+  static String serializeColumnFamilyAttribute(Function<ColumnFamilyDescriptor, String> fn, List<TableDescriptor> allTables)
       throws UnsupportedEncodingException {
     StringBuilder attributeValue = new StringBuilder();
     int i = 0;
-    for (HTableDescriptor tableDescriptor : allTables) {
+    for (TableDescriptor tableDescriptor : allTables) {
       if (tableDescriptor == null) {
         // could happen with mock table instance
         // CODEREVIEW: Can I set an empty string in conf if mock table instance?
         return "";
       }
-      Collection<HColumnDescriptor> families = tableDescriptor.getFamilies();
-      for (HColumnDescriptor familyDescriptor : families) {
+      for (ColumnFamilyDescriptor familyDescriptor : tableDescriptor.getColumnFamilies()) {
         if (i++ > 0) {
           attributeValue.append('&');
         }
@@ -829,7 +839,7 @@ public class HFileOutputFormat2
    *           on failure to read column family descriptors
    */
   @VisibleForTesting
-  static Function<HColumnDescriptor, String> compressionDetails = familyDescriptor ->
+  static Function<ColumnFamilyDescriptor, String> compressionDetails = familyDescriptor ->
           familyDescriptor.getCompressionType().getName();
 
   /**
@@ -845,7 +855,7 @@ public class HFileOutputFormat2
    *           on failure to read column family descriptors
    */
   @VisibleForTesting
-  static Function<HColumnDescriptor, String> blockSizeDetails = familyDescriptor -> String
+  static Function<ColumnFamilyDescriptor, String> blockSizeDetails = familyDescriptor -> String
           .valueOf(familyDescriptor.getBlocksize());
 
   /**
@@ -861,10 +871,10 @@ public class HFileOutputFormat2
    *           on failure to read column family descriptors
    */
   @VisibleForTesting
-  static Function<HColumnDescriptor, String> bloomTypeDetails = familyDescriptor -> {
+  static Function<ColumnFamilyDescriptor, String> bloomTypeDetails = familyDescriptor -> {
     String bloomType = familyDescriptor.getBloomFilterType().toString();
     if (bloomType == null) {
-      bloomType = HColumnDescriptor.DEFAULT_BLOOMFILTER;
+      bloomType = ColumnFamilyDescriptorBuilder.DEFAULT_BLOOMFILTER.name();
     }
     return bloomType;
   };
@@ -881,7 +891,7 @@ public class HFileOutputFormat2
    *           on failure to read column family descriptors
    */
   @VisibleForTesting
-  static Function<HColumnDescriptor, String> dataBlockEncodingDetails = familyDescriptor -> {
+  static Function<ColumnFamilyDescriptor, String> dataBlockEncodingDetails = familyDescriptor -> {
     DataBlockEncoding encoding = familyDescriptor.getDataBlockEncoding();
     if (encoding == null) {
       encoding = DataBlockEncoding.NONE;

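The Function-typed attribute extractors above all follow one shape. A self-contained sketch of that shape, using java.util.function.Function — class and method names are illustrative, and the joining format merely mirrors the spirit of serializeColumnFamilyAttribute:

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.function.Function;

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class FamilyAttributeSerializer {
  // One extractor per attribute, keyed off ColumnFamilyDescriptor getters.
  static final Function<ColumnFamilyDescriptor, String> COMPRESSION =
      family -> family.getCompressionType().getName();

  // Joins URL-encoded "family=value" pairs with '&'.
  static String serialize(TableDescriptor td, Function<ColumnFamilyDescriptor, String> fn)
      throws UnsupportedEncodingException {
    StringBuilder out = new StringBuilder();
    for (ColumnFamilyDescriptor family : td.getColumnFamilies()) {
      if (out.length() > 0) {
        out.append('&');
      }
      out.append(URLEncoder.encode(family.getNameAsString(), "UTF-8"));
      out.append('=');
      out.append(URLEncoder.encode(fn.apply(family), "UTF-8"));
    }
    return out.toString();
  }
}
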
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
index 3c72c2b..727d31f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
@@ -230,7 +230,7 @@ public class Import extends Configured implements Tool {
       int reduceNum = context.getNumReduceTasks();
       Configuration conf = context.getConfiguration();
       TableName tableName = TableName.valueOf(context.getConfiguration().get(TABLE_NAME));
-      try (Connection conn = ConnectionFactory.createConnection(conf); 
+      try (Connection conn = ConnectionFactory.createConnection(conf);
           RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
         byte[][] startKeys = regionLocator.getStartKeys();
         if (startKeys.length != reduceNum) {
@@ -622,10 +622,10 @@ public class Import extends Configured implements Tool {
 
     if (hfileOutPath != null && conf.getBoolean(HAS_LARGE_RESULT, false)) {
       LOG.info("Use Large Result!!");
-      try (Connection conn = ConnectionFactory.createConnection(conf); 
+      try (Connection conn = ConnectionFactory.createConnection(conf);
           Table table = conn.getTable(tableName);
           RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
-        HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator);
+        HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(), regionLocator);
         job.setMapperClass(KeyValueSortImporter.class);
         job.setReducerClass(KeyValueReducer.class);
         Path outputDir = new Path(hfileOutPath);
@@ -655,7 +655,7 @@ public class Import extends Configured implements Tool {
         FileOutputFormat.setOutputPath(job, outputDir);
         job.setMapOutputKeyClass(ImmutableBytesWritable.class);
         job.setMapOutputValueClass(KeyValue.class);
-        HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator);
+        HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(), regionLocator);
         TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
             com.google.common.base.Preconditions.class);
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
index aa7b129..e450659 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
@@ -63,6 +63,8 @@ import org.apache.hadoop.util.ToolRunner;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Splitter;
 import com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 
 /**
  * Tool to import data from a TSV file.
@@ -521,15 +523,15 @@ public class ImportTsv extends Configured implements Tool {
             if(!noStrict) {
               ArrayList<String> unmatchedFamilies = new ArrayList<>();
               Set<String> cfSet = getColumnFamilies(columns);
-              HTableDescriptor tDesc = table.getTableDescriptor();
+              TableDescriptor tDesc = table.getDescriptor();
               for (String cf : cfSet) {
-                if(tDesc.getFamily(Bytes.toBytes(cf)) == null) {
+                if(!tDesc.hasColumnFamily(Bytes.toBytes(cf))) {
                   unmatchedFamilies.add(cf);
                 }
               }
               if(unmatchedFamilies.size() > 0) {
                 ArrayList<String> familyNames = new ArrayList<>();
-                for (HColumnDescriptor family : table.getTableDescriptor().getFamilies()) {
+                for (ColumnFamilyDescriptor family : table.getDescriptor().getColumnFamilies()) {
                   familyNames.add(family.getNameAsString());
                 }
                 String msg =
@@ -553,7 +555,7 @@ public class ImportTsv extends Configured implements Tool {
             if (!isDryRun) {
               Path outputDir = new Path(hfileOutPath);
               FileOutputFormat.setOutputPath(job, outputDir);
-              HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(),
+              HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(),
                   regionLocator);
             }
           }

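The ImportTsv change swaps a null check on getFamily(...) for hasColumnFamily(...). A compact sketch of that validation step (illustrative class and method names):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

public class FamilyValidation {
  /** Returns the requested families the table does not actually have. */
  static List<String> unmatchedFamilies(Table table, Set<String> requested)
      throws IOException {
    TableDescriptor desc = table.getDescriptor();
    List<String> missing = new ArrayList<>();
    for (String cf : requested) {
      // hasColumnFamily(byte[]) replaces the old getFamily(...) == null test.
      if (!desc.hasColumnFamily(Bytes.toBytes(cf))) {
        missing.add(cf);
      }
    }
    return missing;
  }
}
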
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 3af4290..b4b0f0f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -70,6 +70,7 @@ import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ClientServiceCallable;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.RegionLocator;
@@ -647,9 +648,9 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
    */
   private void validateFamiliesInHFiles(Table table, Deque<LoadQueueItem> queue, boolean silence)
       throws IOException {
-    Collection<HColumnDescriptor> families = table.getTableDescriptor().getFamilies();
-    List<String> familyNames = new ArrayList<>(families.size());
-    for (HColumnDescriptor family : families) {
+    ColumnFamilyDescriptor[] families = table.getDescriptor().getColumnFamilies();
+    List<String> familyNames = new ArrayList<>(families.length);
+    for (ColumnFamilyDescriptor family : families) {
       familyNames.add(family.getNameAsString());
     }
     Iterator<LoadQueueItem> queueIter = queue.iterator();

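Because getColumnFamilies() hands back an array rather than a Collection, call sites trade size() for length, as above. Stated as a tiny standalone helper (illustrative name):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class FamilyNames {
  static List<String> of(TableDescriptor desc) {
    ColumnFamilyDescriptor[] families = desc.getColumnFamilies();
    List<String> names = new ArrayList<>(families.length); // array, so .length
    for (ColumnFamilyDescriptor family : families) {
      names.add(family.getNameAsString());
    }
    return names;
  }
}
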
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
index fdcf30e..5c779d6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
@@ -21,9 +21,8 @@ package org.apache.hadoop.hbase.mapreduce;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapreduce.Job;
@@ -84,7 +83,7 @@ public class MultiTableHFileOutputFormat extends HFileOutputFormat2 {
 
   /**
    * Analogous to
-   * {@link HFileOutputFormat2#configureIncrementalLoad(Job, HTableDescriptor, RegionLocator)},
+   * {@link HFileOutputFormat2#configureIncrementalLoad(Job, TableDescriptor, RegionLocator)},
    * this function will configure the requisite number of reducers to write HFiles for multiple
    * tables simultaneously
    *

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
index 06e43e7..59678eb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
@@ -307,7 +307,7 @@ public class WALPlayer extends Configured implements Tool {
       try (Connection conn = ConnectionFactory.createConnection(conf);
           Table table = conn.getTable(tableName);
           RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
-        HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator);
+        HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(), regionLocator);
       }
       TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
           com.google.common.base.Preconditions.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
index 56c0242..c475b17 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
@@ -75,7 +75,7 @@ public class DefaultMobStoreCompactor extends DefaultCompactor {
     public InternalScanner createScanner(List<StoreFileScanner> scanners,
        ScanType scanType, FileDetails fd, long smallestReadPoint) throws IOException {
       Scan scan = new Scan();
-      scan.setMaxVersions(store.getFamily().getMaxVersions());
+      scan.setMaxVersions(store.getColumnFamilyDescriptor().getMaxVersions());
      return new StoreScanner(store, store.getScanInfo(), scan, scanners, scanType,
           smallestReadPoint, fd.earliestPutTs);
     }
@@ -103,7 +103,7 @@ public class DefaultMobStoreCompactor extends DefaultCompactor {
      throw new IllegalArgumentException("The store " + store + " is not a HMobStore");
     }
     mobStore = (HMobStore) store;
-    mobSizeThreshold = store.getFamily().getMobThreshold();
+    mobSizeThreshold = store.getColumnFamilyDescriptor().getMobThreshold();
   }
 
   @Override
@@ -195,7 +195,7 @@ public class DefaultMobStoreCompactor extends DefaultCompactor {
         ScannerContext.newBuilder().setBatchLimit(compactionKVMax).build();
     throughputController.start(compactionName);
    KeyValueScanner kvs = (scanner instanceof KeyValueScanner)? (KeyValueScanner)scanner : null;
-    long shippedCallSizeLimit = (long) numofFilesToCompact * this.store.getFamily().getBlocksize();
+    long shippedCallSizeLimit = (long) numofFilesToCompact * this.store.getColumnFamilyDescriptor().getBlocksize();
     try {
       try {
        // If the mob file writer could not be created, directly write the cell to the store file.

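The mob classes now read family tuning through Store#getColumnFamilyDescriptor() instead of the removed getFamily(). A hedged sketch of the accessor pattern (class name illustrative; getters are the ones exercised by the hunks above):

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.regionserver.Store;

public class StoreFamilyTuning {
  static void log(Store store) {
    ColumnFamilyDescriptor family = store.getColumnFamilyDescriptor();
    long mobThreshold = family.getMobThreshold(); // cell size that goes to MOB files
    int blocksize = family.getBlocksize();        // HFile block size for this family
    int maxVersions = family.getMaxVersions();    // version cap used by compaction scans
    System.out.println(mobThreshold + "/" + blocksize + "/" + maxVersions);
  }
}
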
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
index 1a1c5a7..f11bd99 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
@@ -72,7 +72,7 @@ public class DefaultMobStoreFlusher extends DefaultStoreFlusher {
 
   public DefaultMobStoreFlusher(Configuration conf, Store store) throws IOException {
     super(conf, store);
-    mobCellValueSizeThreshold = store.getFamily().getMobThreshold();
+    mobCellValueSizeThreshold = store.getColumnFamilyDescriptor().getMobThreshold();
     this.targetPath = MobUtils.getMobFamilyPath(conf, store.getTableName(),
         store.getColumnFamilyName());
     if (!this.store.getFileSystem().exists(targetPath)) {
@@ -115,7 +115,7 @@ public class DefaultMobStoreFlusher extends DefaultStoreFlusher {
       synchronized (flushLock) {
         status.setStatus("Flushing " + store + ": creating writer");
         // Write the map out to the disk
-        writer = store.createWriterInTmp(cellsCount, store.getFamily().getCompressionType(),
+        writer = store.createWriterInTmp(cellsCount, store.getColumnFamilyDescriptor().getCompressionType(),
             false, true, true, false, snapshot.getTimeRangeTracker());
         IOException e = null;
         try {
@@ -173,7 +173,7 @@ public class DefaultMobStoreFlusher extends DefaultStoreFlusher {
     long mobSize = 0;
     long time = snapshot.getTimeRangeTracker().getMax();
     mobFileWriter = mobStore.createWriterInTmp(new Date(time), snapshot.getCellsCount(),
-        store.getFamily().getCompressionType(), store.getRegionInfo().getStartKey(), false);
+        store.getColumnFamilyDescriptor().getCompressionType(), store.getRegionInfo().getStartKey(), false);
     // the target path is {tableName}/.mob/{cfName}/mobFiles
     // the relative path is mobFiles
     byte[] fileName = Bytes.toBytes(mobFileWriter.getPath().getName());

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobCacheConfig.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobCacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobCacheConfig.java
index 6c80355..d18d282 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobCacheConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobCacheConfig.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.mob;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 
 /**
@@ -31,7 +32,7 @@ public class MobCacheConfig extends CacheConfig {
 
   private static MobFileCache mobFileCache;
 
-  public MobCacheConfig(Configuration conf, HColumnDescriptor family) {
+  public MobCacheConfig(Configuration conf, ColumnFamilyDescriptor family) {
     super(conf, family);
     instantiateMobFileCache(conf);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
index a869b7a..80bda28 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.backup.HFileArchiver;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.MobCompactPartitionPolicy;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.HFileLink;
@@ -651,7 +652,7 @@ public final class MobUtils {
    * @throws IOException
    */
   public static StoreFileWriter createWriter(Configuration conf, FileSystem fs,
-      HColumnDescriptor family, Path path, long maxKeyCount,
+      ColumnFamilyDescriptor family, Path path, long maxKeyCount,
       Compression.Algorithm compression, CacheConfig cacheConfig, Encryption.Context cryptoContext,
       ChecksumType checksumType, int bytesPerChecksum, int blocksize, BloomType bloomType,
       boolean isCompaction)

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/ActivePolicyEnforcement.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/ActivePolicyEnforcement.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/ActivePolicyEnforcement.java
index c558b26..b7e7554 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/ActivePolicyEnforcement.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/ActivePolicyEnforcement.java
@@ -69,7 +69,7 @@ public class ActivePolicyEnforcement {
    * @see #getPolicyEnforcement(TableName)
    */
   public SpaceViolationPolicyEnforcement getPolicyEnforcement(Region r) {
-    return getPolicyEnforcement(Objects.requireNonNull(r).getTableDesc().getTableName());
+    return getPolicyEnforcement(Objects.requireNonNull(r).getTableDescriptor().getTableName());
   }
 
   /**

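Both quota classes resolve a region back to its table the same way; a one-method sketch (class name illustrative):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.regionserver.Region;

public class RegionTableResolver {
  /** getTableDescriptor() is the successor to the removed getTableDesc(). */
  static TableName tableOf(Region region) {
    return region.getTableDescriptor().getTableName();
  }
}
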
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java
index 756251a..4d3c1bc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java
@@ -183,7 +183,7 @@ public class RegionServerRpcQuotaManager {
     } else {
       ugi = User.getCurrent().getUGI();
     }
-    TableName table = region.getTableDesc().getTableName();
+    TableName table = region.getTableDescriptor().getTableName();
 
     OperationQuota quota = getQuota(ugi, table);
     try {
