This is an automated email from the ASF dual-hosted git repository.

gokcen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit 82d346c5ccae71b5818243f344f63382f51d3bc2
Author: Gokcen Iskender <gisken...@salesforce.com>
AuthorDate: Mon Mar 29 18:28:52 2021 -0700

    Add more tests for PHOENIX-6247
    
    Signed-off-by: Gokcen Iskender <gokc...@gmail.com>
---
 .../apache/phoenix/end2end/CsvBulkLoadToolIT.java  |   2 +-
 ...ableNameIT.java => LogicalTableNameBaseIT.java} | 482 ++++-------------
 .../end2end/LogicalTableNameExtendedIT.java        | 276 ++++++++++
 .../apache/phoenix/end2end/LogicalTableNameIT.java | 576 ++++-----------------
 .../apache/phoenix/end2end/join/HashJoinIT.java    |  39 +-
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |  33 +-
 6 files changed, 546 insertions(+), 862 deletions(-)
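
For context when reading the diff below: the central API change is that LogicalTableNameIT.renameAndDropPhysicalTable() gains an isNamespaceEnabled flag, and the new LogicalTableNameBaseIT base class adds a createAndPointToNewPhysicalTable() helper that snapshots the current physical HBase table, clones it under the NEW_TBL_ prefix, repoints the Phoenix metadata at the clone, and drops the original table. A minimal sketch of how a test might call the new helper; the connection URL and table names here are illustrative, not taken from the commit:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import org.apache.phoenix.end2end.LogicalTableNameBaseIT;

    public class RepointSketch {
        static void repoint(String url) throws Exception {
            try (Connection conn = DriverManager.getConnection(url)) {
                // With namespace mapping enabled, the physical HBase name joins schema and
                // table with ':' rather than '.', so the helper is passed isNamespaceEnabled=true.
                String fullTableHName = "S_SCHEMA" + ":" + "TBL_DATA"; // hypothetical names
                // Snapshot and clone the physical table, repoint the metadata, drop the original.
                LogicalTableNameBaseIT.createAndPointToNewPhysicalTable(conn, fullTableHName, true);
            }
        }
    }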

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
index d42588a..c39d118 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
@@ -466,7 +466,7 @@ public class CsvBulkLoadToolIT extends BaseOwnClusterIT {
             admin.snapshot(snapshotName, TableName.valueOf(fullTableName));
             admin.cloneSnapshot(Bytes.toBytes(snapshotName), Bytes.toBytes(fullNewTableName));
         }
-        LogicalTableNameIT.renameAndDropPhysicalTable(conn, "NULL", schemaName, tableName, newTableName);
+        LogicalTableNameIT.renameAndDropPhysicalTable(conn, "NULL", schemaName, tableName, newTableName, false);
 
         String csvName = "/tmp/input_logical_name.csv";
         FileSystem fs = FileSystem.get(getUtility().getConfiguration());
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameBaseIT.java
similarity index 53%
copy from phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameIT.java
copy to phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameBaseIT.java
index 15388a8..5834984 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameBaseIT.java
@@ -27,12 +27,7 @@ import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.regionserver.ScanInfoUtil;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.mapreduce.Counters;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.phoenix.end2end.index.SingleCellIndexIT;
-import org.apache.phoenix.hbase.index.IndexRegionObserver;
 import org.apache.phoenix.jdbc.PhoenixConnection;
-import org.apache.phoenix.mapreduce.index.IndexScrutinyTool;
 import org.apache.phoenix.query.BaseTest;
 import org.apache.phoenix.query.PhoenixTestBuilder;
 import org.apache.phoenix.query.QueryConstants;
@@ -45,13 +40,6 @@ import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.StringUtil;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -59,7 +47,6 @@ import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -67,9 +54,8 @@ import java.util.Properties;
 import java.util.Random;
 
 import static java.util.Arrays.asList;
-import static org.apache.phoenix.mapreduce.index.PhoenixScrutinyJobCounters.INVALID_ROW_COUNT;
-import static org.apache.phoenix.mapreduce.index.PhoenixScrutinyJobCounters.VALID_ROW_COUNT;
 import static org.apache.phoenix.query.PhoenixTestBuilder.DDLDefaults.MAX_ROWS;
+import static org.apache.phoenix.query.QueryConstants.NAMESPACE_SEPARATOR;
 import static org.apache.phoenix.util.PhoenixRuntime.TENANT_ID_ATTRIB;
 import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
@@ -77,50 +63,22 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
 
-@RunWith(Parameterized.class)
-@Category(NeedsOwnMiniClusterTest.class)
-public class LogicalTableNameIT extends BaseTest {
-    private static final Logger LOGGER = LoggerFactory.getLogger(LogicalTableNameIT.class);
-
-    private final boolean createChildAfterTransform;
-    private final boolean immutable;
-    private String dataTableDdl;
+public class LogicalTableNameBaseIT extends BaseTest {
+    protected String dataTableDdl = "";
     public static final String NEW_TABLE_PREFIX = "NEW_TBL_";
     private Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
 
-    @BeforeClass
-    public static synchronized void doSetup() throws Exception {
+    static void initCluster(boolean isNamespaceMapped) throws Exception {
         Map<String, String> props = Maps.newConcurrentMap();
         props.put(QueryServices.DROP_METADATA_ATTRIB, Boolean.TRUE.toString());
         props.put(ScanInfoUtil.PHOENIX_MAX_LOOKBACK_AGE_CONF_KEY, Integer.toString(60*60*1000)); // An hour
-        setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
-    }
-
-    public LogicalTableNameIT(boolean createChildAfterTransform, boolean immutable)  {
-        this.createChildAfterTransform = createChildAfterTransform;
-        this.immutable = immutable;
-        StringBuilder optionBuilder = new StringBuilder();
-        if (immutable) {
-            optionBuilder.append(" ,IMMUTABLE_STORAGE_SCHEME=ONE_CELL_PER_COLUMN, IMMUTABLE_ROWS=true");
-        }
-        this.dataTableDdl = optionBuilder.toString();
-    }
-
-    @Parameterized.Parameters(
-            name = "createChildAfterTransform={0}, immutable={1}")
-    public static synchronized Collection<Object[]> data() {
-        List<Object[]> list = Lists.newArrayListWithExpectedSize(2);
-        boolean[] Booleans = new boolean[] { false, true };
-        for (boolean immutable : Booleans) {
-            for (boolean createAfter : Booleans) {
-                list.add(new Object[] { createAfter, immutable });
-            }
+        if (isNamespaceMapped) {
+            props.put(QueryServices.IS_NAMESPACE_MAPPING_ENABLED, Boolean.TRUE.toString());
         }
-
-        return list;
+        setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
     }
 
-    private Connection getConnection(Properties props) throws Exception {
+    protected Connection getConnection(Properties props) throws Exception {
         props.setProperty(QueryServices.DROP_METADATA_ATTRIB, Boolean.toString(true));
         // Force real driver to be used as the test one doesn't handle creating
         // more than one ConnectionQueryService
@@ -130,12 +88,40 @@ public class LogicalTableNameIT extends BaseTest {
         return DriverManager.getConnection(url, props);
     }
 
-    private  HashMap<String, ArrayList<String>> testBaseTableWithIndex_BaseTableChange(Connection conn, Connection conn2, String schemaName,
-                                                                                       String tableName, String indexName) throws Exception {
+    public static void createAndPointToNewPhysicalTable(Connection conn, String fullTableHName, boolean isNamespaceEnabled) throws Exception{
+        String tableName = SchemaUtil.getTableNameFromFullName(fullTableHName);
+        String newTableName = NEW_TABLE_PREFIX + tableName;
+        createAndPointToNewPhysicalTable(conn, fullTableHName,newTableName, isNamespaceEnabled);
+    }
+
+    public static void createAndPointToNewPhysicalTable(Connection conn, String fullTableHName, String newTableName, boolean isNamespaceEnabled) throws Exception{
+        String tableName = SchemaUtil.getTableNameFromFullName(fullTableHName);
+        String schemaName = SchemaUtil.getSchemaNameFromFullName(fullTableHName);
+        String fullNewTableHName = schemaName + (isNamespaceEnabled? ":" : ".") + newTableName;
+        String
+                snapshotName =
+                new StringBuilder(tableName).append("-Snapshot").toString();
+
+        try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices()
+                .getAdmin()) {
+
+            admin.snapshot(snapshotName, TableName.valueOf(fullTableHName));
+            admin.cloneSnapshot(Bytes.toBytes(snapshotName), Bytes.toBytes(fullNewTableHName));
+            admin.deleteSnapshot(snapshotName);
+            LogicalTableNameIT.renameAndDropPhysicalTable(conn, null, schemaName, tableName,
+                    newTableName, isNamespaceEnabled);
+
+        }
+    }
+
+    protected HashMap<String, ArrayList<String>> testBaseTableWithIndex_BaseTableChange(Connection conn, Connection conn2,
+                                                                                        String schemaName, String tableName, String indexName,
+                                                                                        boolean isNamespaceEnabled,
+                                                                                        boolean createChildAfterRename) throws Exception {
         conn.setAutoCommit(true);
         String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
         createTable(conn, fullTableName);
-        if (!createChildAfterTransform) {
+        if (!createChildAfterRename) {
             createIndexOnTable(conn, fullTableName, indexName);
         }
         HashMap<String, ArrayList<String>> expected = populateTable(conn, 
fullTableName, 1, 2);
@@ -147,7 +133,7 @@ public class LogicalTableNameIT extends BaseTest {
             String snapshotName = new 
StringBuilder(fullTableName).append("-Snapshot").toString();
             admin.snapshot(snapshotName, TableName.valueOf(fullTableName));
             admin.cloneSnapshot(Bytes.toBytes(snapshotName), 
Bytes.toBytes(fullNewTableName));
-
+            admin.deleteSnapshot(snapshotName);
             try (HTableInterface htable = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(Bytes.toBytes(fullNewTableName)))
 {
                 Put put = new Put(ByteUtil.concat(Bytes.toBytes("PK3")));
                 put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, 
QueryConstants.EMPTY_COLUMN_BYTES,
@@ -168,93 +154,18 @@ public class LogicalTableNameIT extends BaseTest {
         assertTrue(rs1.next());
 
         // Rename table to point to the new hbase table
-        renameAndDropPhysicalTable(conn, "NULL", schemaName, tableName, 
newTableName);
+        renameAndDropPhysicalTable(conn, "NULL", schemaName, tableName, 
newTableName, isNamespaceEnabled);
 
-        if (createChildAfterTransform) {
+        if (createChildAfterRename) {
             createIndexOnTable(conn, fullTableName, indexName);
         }
 
         return expected;
     }
 
-    @Test
-    public void testUpdatePhysicalTableNameWithIndex() throws Exception {
-        String schemaName = "S_" + generateUniqueName();
-        String tableName = "TBL_" + generateUniqueName();
-        String indexName = "IDX_" + generateUniqueName();
-        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
-        String fullIndexName = SchemaUtil.getTableName(schemaName, indexName);
-
-        try (Connection conn = getConnection(props)) {
-            try (Connection conn2 = getConnection(props)) {
-                HashMap<String, ArrayList<String>> expected = 
testBaseTableWithIndex_BaseTableChange(conn, conn2, schemaName, tableName, 
indexName);
-
-                // We have to rebuild index for this to work
-                IndexToolIT.runIndexTool(true, false, schemaName, tableName, 
indexName);
-
-                validateTable(conn, fullTableName);
-                validateTable(conn2, fullTableName);
-                validateIndex(conn, fullIndexName, false, expected);
-                validateIndex(conn2, fullIndexName, false, expected);
-
-                // Add row and check
-                populateTable(conn, fullTableName, 10, 1);
-                ResultSet rs = conn2.createStatement().executeQuery("SELECT * 
FROM " + fullIndexName + " WHERE \":PK1\"='PK10'");
-                assertEquals(true, rs.next());
-                rs = conn.createStatement().executeQuery("SELECT * FROM " + 
fullTableName  + " WHERE PK1='PK10'");
-                assertEquals(true, rs.next());
-
-                // Drop row and check
-                conn.createStatement().execute("DELETE from " + fullTableName 
+ " WHERE PK1='PK10'");
-                rs = conn2.createStatement().executeQuery("SELECT * FROM " + 
fullIndexName + " WHERE \":PK1\"='PK10'");
-                assertEquals(false, rs.next());
-                rs = conn.createStatement().executeQuery("SELECT * FROM " + 
fullTableName  + " WHERE PK1='PK10'");
-                assertEquals(false, rs.next());
-
-                conn2.createStatement().execute("DROP TABLE " + fullTableName);
-                // check that the physical data table is dropped
-                Admin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
-                assertEquals(false, 
admin.tableExists(TableName.valueOf(SchemaUtil.getTableName(schemaName,NEW_TABLE_PREFIX
 + tableName))));
-
-                // check that index is dropped
-                assertEquals(false, 
admin.tableExists(TableName.valueOf(fullIndexName)));
-
-            }
-        }
-    }
-
-    @Test
-    public void testUpdatePhysicalTableNameWithIndex_runScrutiny() throws 
Exception {
-        String schemaName = "S_" + generateUniqueName();
-        String tableName = "TBL_" + generateUniqueName();
-        String indexName = "IDX_" + generateUniqueName();
-
-        try (Connection conn = getConnection(props)) {
-            try (Connection conn2 = getConnection(props)) {
-                testBaseTableWithIndex_BaseTableChange(conn, conn2, 
schemaName, tableName, indexName);
-
-                List<Job>
-                        completedJobs =
-                        IndexScrutinyToolBaseIT.runScrutinyTool(schemaName, 
tableName, indexName, 1L,
-                                
IndexScrutinyTool.SourceTable.DATA_TABLE_SOURCE);
-
-                Job job = completedJobs.get(0);
-                assertTrue(job.isSuccessful());
-
-                Counters counters = job.getCounters();
-                if (createChildAfterTransform) {
-                    assertEquals(3, 
counters.findCounter(VALID_ROW_COUNT).getValue());
-                    assertEquals(0, 
counters.findCounter(INVALID_ROW_COUNT).getValue());
-                } else {
-                    // Since we didn't build the index, we expect 1 missing 
index row
-                    assertEquals(2, 
counters.findCounter(VALID_ROW_COUNT).getValue());
-                    assertEquals(1, 
counters.findCounter(INVALID_ROW_COUNT).getValue());
-                }
-            }
-        }
-    }
-
-    private  HashMap<String, ArrayList<String>> 
test_IndexTableChange(Connection conn, Connection conn2, String schemaName, 
String tableName, String indexName, byte[] verifiedBytes) throws Exception {
+    protected HashMap<String, ArrayList<String>> 
test_IndexTableChange(Connection conn, Connection conn2, String schemaName, 
String tableName,
+                                                                       String 
indexName,
+            byte[] verifiedBytes, boolean isNamespaceEnabled) throws Exception 
{
         String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
         String fullIndexName = SchemaUtil.getTableName(schemaName, indexName);
         conn.setAutoCommit(true);
@@ -263,14 +174,19 @@ public class LogicalTableNameIT extends BaseTest {
         HashMap<String, ArrayList<String>> expected = populateTable(conn, 
fullTableName, 1, 2);
 
         // Create another hbase table for index and add 1 more row
-        String newTableName = "NEW_IDXTBL_" + generateUniqueName();
+        String newTableName = "NEW_IDXTBL_" + indexName;
         String fullNewTableName = SchemaUtil.getTableName(schemaName, 
newTableName);
+        String fullIndexTableHbaseName = fullIndexName;
+        if (isNamespaceEnabled) {
+            fullNewTableName = schemaName + NAMESPACE_SEPARATOR + newTableName;
+            fullIndexTableHbaseName = schemaName + NAMESPACE_SEPARATOR + 
indexName;
+        }
         try (HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices()
                 .getAdmin()) {
             String snapshotName = new 
StringBuilder(indexName).append("-Snapshot").toString();
-            admin.snapshot(snapshotName, TableName.valueOf(fullIndexName));
+            admin.snapshot(snapshotName, 
TableName.valueOf(fullIndexTableHbaseName));
             admin.cloneSnapshot(Bytes.toBytes(snapshotName), 
Bytes.toBytes(fullNewTableName));
-
+            admin.deleteSnapshot(snapshotName);
             try (HTableInterface htable = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(Bytes.toBytes(fullNewTableName)))
 {
                 Put
                         put =
@@ -292,113 +208,44 @@ public class LogicalTableNameIT extends BaseTest {
         assertTrue(rs1.next());
 
         // Rename table to point to the new hbase table
-        renameAndDropPhysicalTable(conn, "NULL", schemaName, indexName, 
newTableName);
+        renameAndDropPhysicalTable(conn, "NULL", schemaName, indexName, 
newTableName, isNamespaceEnabled);
 
         return expected;
     }
 
-    @Test
-    public void testUpdatePhysicalIndexTableName() throws Exception {
-        String schemaName = "S_" + generateUniqueName();
-        String tableName = "TBL_" + generateUniqueName();
-        String indexName = "IDX_" + generateUniqueName();
-        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
-        String fullIndexName = SchemaUtil.getTableName(schemaName, indexName);
-        try (Connection conn = getConnection(props)) {
-            try (Connection conn2 = getConnection(props)) {
-                HashMap<String, ArrayList<String>> expected = 
test_IndexTableChange(conn, conn2, schemaName, tableName, indexName, 
IndexRegionObserver.VERIFIED_BYTES);
-
-                validateIndex(conn, fullIndexName, false, expected);
-                validateIndex(conn2, fullIndexName, false, expected);
-
-                // create another index and drop the first index and validate 
the second one
-                String indexName2 = "IDX2_" + generateUniqueName();
-                String fullIndexName2 = SchemaUtil.getTableName(schemaName, 
indexName2);
-                if (createChildAfterTransform) {
-                    createIndexOnTable(conn2, fullTableName, indexName2);
-                }
-                dropIndex(conn2, fullTableName, indexName);
-                if (!createChildAfterTransform) {
-                    createIndexOnTable(conn2, fullTableName, indexName2);
-                }
-                // The new index doesn't have the new row
-                expected.remove("PK3");
-                validateIndex(conn, fullIndexName2, false, expected);
-                validateIndex(conn2, fullIndexName2, false, expected);
-            }
-        }
-    }
-
-    @Test
-    public void testUpdatePhysicalIndexTableName_runScrutiny() throws 
Exception {
-        String schemaName = "S_" + generateUniqueName();
-        String tableName = "TBL_" + generateUniqueName();
-        String indexName = "IDX_" + generateUniqueName();
-        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
-        String fullIndexName = SchemaUtil.getTableName(schemaName, indexName);
-        try (Connection conn = getConnection(props)) {
-            try (Connection conn2 = getConnection(props)) {
-                test_IndexTableChange(conn, conn2, schemaName, tableName, 
indexName, IndexRegionObserver.VERIFIED_BYTES);
-                List<Job>
-                        completedJobs =
-                        IndexScrutinyToolBaseIT.runScrutinyTool(schemaName, 
tableName, indexName, 1L,
-                                
IndexScrutinyTool.SourceTable.INDEX_TABLE_SOURCE);
-
-                Job job = completedJobs.get(0);
-                assertTrue(job.isSuccessful());
-
-                Counters counters = job.getCounters();
-
-                // Since we didn't build the index, we expect 1 missing index 
row
-                assertEquals(2, 
counters.findCounter(VALID_ROW_COUNT).getValue());
-                assertEquals(1, 
counters.findCounter(INVALID_ROW_COUNT).getValue());
-
-                // Try with unverified bytes
-                String tableName2 = "TBL_" + generateUniqueName();
-                String indexName2 = "IDX_" + generateUniqueName();
-                test_IndexTableChange(conn, conn2, schemaName, tableName2, 
indexName2, IndexRegionObserver.UNVERIFIED_BYTES);
-
-                completedJobs =
-                        IndexScrutinyToolBaseIT.runScrutinyTool(schemaName, 
tableName2, indexName2, 1L,
-                                
IndexScrutinyTool.SourceTable.INDEX_TABLE_SOURCE);
-
-                job = completedJobs.get(0);
-                assertTrue(job.isSuccessful());
-
-                counters = job.getCounters();
-
-                // Since we didn't build the index, we expect 1 missing index 
row
-                assertEquals(2, 
counters.findCounter(VALID_ROW_COUNT).getValue());
-                assertEquals(0, 
counters.findCounter(INVALID_ROW_COUNT).getValue());
-
-            }
-        }
-    }
-
-    private HashMap<String, ArrayList<String>> 
testWithViewsAndIndex_BaseTableChange(Connection conn, Connection conn2, String 
schemaName, String tableName, String viewName1, String v1_indexName1, String 
v1_indexName2, String viewName2, String v2_indexName1) throws Exception {
+    protected HashMap<String, ArrayList<String>> 
testWithViewsAndIndex_BaseTableChange(Connection conn, Connection conn2, String 
tenantName,
+                                                                               
        String schemaName, String tableName,
+            String viewName1, String v1_indexName1, String v1_indexName2, 
String viewName2, String v2_indexName1, boolean isNamespaceEnabled,
+                                                                               
        boolean createChildAfterRename) throws Exception {
         conn.setAutoCommit(true);
+        conn2.setAutoCommit(true);
         String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
         String fullViewName1 = SchemaUtil.getTableName(schemaName, viewName1);
         String fullViewName2 = SchemaUtil.getTableName(schemaName, viewName2);
         createTable(conn, fullTableName);
         HashMap<String, ArrayList<String>> expected = new HashMap<>();
-        if (!createChildAfterTransform) {
-            createViewAndIndex(conn, schemaName, tableName, viewName1, 
v1_indexName1);
-            createViewAndIndex(conn, schemaName, tableName, viewName1, 
v1_indexName2);
-            createViewAndIndex(conn, schemaName, tableName, viewName2, 
v2_indexName1);
+        if (!createChildAfterRename) {
+            createViewAndIndex(conn2, schemaName, tableName, viewName1, 
v1_indexName1);
+            createViewAndIndex(conn2, schemaName, tableName, viewName1, 
v1_indexName2);
+            createViewAndIndex(conn2, schemaName, tableName, viewName2, 
v2_indexName1);
             expected.putAll(populateView(conn, fullViewName1, 1,2));
             expected.putAll(populateView(conn, fullViewName2, 10,2));
         }
 
         // Create another hbase table and add 1 more row
-        String newTableName = "NEW_TBL_" + generateUniqueName();
+        String newTableName = NEW_TABLE_PREFIX + generateUniqueName();
         String fullNewTableName = SchemaUtil.getTableName(schemaName, 
newTableName);
+        String fullTableHbaseName = fullTableName;
+        if (isNamespaceEnabled) {
+            fullNewTableName = schemaName + NAMESPACE_SEPARATOR + newTableName;
+            fullTableHbaseName = schemaName + NAMESPACE_SEPARATOR + tableName;
+        }
         try (HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices()
                 .getAdmin()) {
             String snapshotName = new 
StringBuilder(fullTableName).append("-Snapshot").toString();
-            admin.snapshot(snapshotName, TableName.valueOf(fullTableName));
+            admin.snapshot(snapshotName, 
TableName.valueOf(fullTableHbaseName));
             admin.cloneSnapshot(Bytes.toBytes(snapshotName), 
Bytes.toBytes(fullNewTableName));
-
+            admin.deleteSnapshot(snapshotName);
             try (HTableInterface htable = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(Bytes.toBytes(fullNewTableName)))
 {
                 Put put = new Put(ByteUtil.concat(Bytes.toBytes("PK3")));
                 put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, 
QueryConstants.EMPTY_COLUMN_BYTES,
@@ -419,29 +266,30 @@ public class LogicalTableNameIT extends BaseTest {
         }
 
         // Query to cache on the second connection
-        String selectTable1 = "SELECT PK1, V1, V2, V3 FROM " + fullTableName + 
" ORDER BY PK1 DESC";
-        ResultSet rs1 = conn2.createStatement().executeQuery(selectTable1);
-        if (!createChildAfterTransform) {
-            assertTrue(rs1.next());
+        if (tenantName != null) {
+            String selectTable1 = "SELECT PK1, V1, V2, V3 FROM " + 
fullTableName + " ORDER BY PK1 DESC";
+            ResultSet rs1 = conn2.createStatement().executeQuery(selectTable1);
+            if (!createChildAfterRename) {
+                assertTrue(rs1.next());
+            }
         }
 
         // Rename table to point to hbase table
-        renameAndDropPhysicalTable(conn, "NULL", schemaName, tableName, 
newTableName);
+        renameAndDropPhysicalTable(conn, "NULL", schemaName, tableName, 
newTableName, isNamespaceEnabled);
 
         conn.unwrap(PhoenixConnection.class).getQueryServices().clearCache();
-        if (createChildAfterTransform) {
-            createViewAndIndex(conn, schemaName, tableName, viewName1, 
v1_indexName1);
-            createViewAndIndex(conn, schemaName, tableName, viewName1, 
v1_indexName2);
-            createViewAndIndex(conn, schemaName, tableName, viewName2, 
v2_indexName1);
-            expected.putAll(populateView(conn, fullViewName1, 1,2));
-            expected.putAll(populateView(conn, fullViewName2, 10,2));
+        if (createChildAfterRename) {
+            createViewAndIndex(conn2, schemaName, tableName, viewName1, 
v1_indexName1);
+            createViewAndIndex(conn2, schemaName, tableName, viewName1, 
v1_indexName2);
+            createViewAndIndex(conn2, schemaName, tableName, viewName2, 
v2_indexName1);
+            expected.putAll(populateView(conn2, fullViewName1, 1,2));
+            expected.putAll(populateView(conn2, fullViewName2, 10,2));
         }
 
         return expected;
     }
 
-
-    private PhoenixTestBuilder.SchemaBuilder createGlobalViewAndTenantView() 
throws Exception {
+    protected PhoenixTestBuilder.SchemaBuilder 
testGlobalViewAndTenantView(boolean createChildAfterRename, boolean 
isNamespaceEnabled) throws Exception {
         int numOfRows = 5;
         PhoenixTestBuilder.SchemaBuilder.TableOptions tableOptions = 
PhoenixTestBuilder.SchemaBuilder.TableOptions.withDefaults();
         tableOptions.getTableColumns().clear();
@@ -469,7 +317,7 @@ public class LogicalTableNameIT extends BaseTest {
 
         // Define the test schema.
         PhoenixTestBuilder.SchemaBuilder schemaBuilder = null;
-        if (!createChildAfterTransform) {
+        if (!createChildAfterRename) {
             schemaBuilder = new PhoenixTestBuilder.SchemaBuilder(getUrl());
             
schemaBuilder.withTableOptions(tableOptions).withGlobalViewOptions(globalViewOptions)
                     .withGlobalViewIndexOptions(globalViewIndexOptions)
@@ -483,23 +331,17 @@ public class LogicalTableNameIT extends BaseTest {
         PTable table = schemaBuilder.getBaseTable();
         String schemaName = table.getSchemaName().getString();
         String tableName = table.getTableName().getString();
-        String newBaseTableName = "NEW_TBL_" + tableName;
+        String newBaseTableName = NEW_TABLE_PREFIX + tableName;
         String fullNewBaseTableName = SchemaUtil.getTableName(schemaName, 
newBaseTableName);
         String fullTableName = table.getName().getString();
 
+        String fullTableHName = schemaName + ":" + tableName;
+        String fullNewTableHName = schemaName + ":" + newBaseTableName;
         try (Connection conn = getConnection(props)) {
-
-            try (HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                String snapshotName = new 
StringBuilder(fullTableName).append("-Snapshot").toString();
-                admin.snapshot(snapshotName, TableName.valueOf(fullTableName));
-                admin.cloneSnapshot(Bytes.toBytes(snapshotName), 
Bytes.toBytes(fullNewBaseTableName));
-            }
-
-            renameAndDropPhysicalTable(conn, null, schemaName, tableName, 
newBaseTableName);
+            createAndPointToNewPhysicalTable(conn, fullTableHName, 
newBaseTableName, isNamespaceEnabled);
         }
 
-        // TODO: this still creates a new table.
-        if (createChildAfterTransform) {
+        if (createChildAfterRename) {
             schemaBuilder = new PhoenixTestBuilder.SchemaBuilder(getUrl());
             schemaBuilder.withDataOptions(schemaBuilder.getDataOptions())
                     .withTableOptions(tableOptions)
@@ -564,139 +406,30 @@ public class LogicalTableNameIT extends BaseTest {
         return schemaBuilder;
     }
 
-    @Test
-    public void testWith2LevelViewsBaseTablePhysicalNameChange() throws 
Exception {
-        // TODO: use namespace in one of the cases
-        PhoenixTestBuilder.SchemaBuilder schemaBuilder = 
createGlobalViewAndTenantView();
-    }
-
-    @Test
-    public void testUpdatePhysicalTableNameWithViews() throws Exception {
-        try (Connection conn = getConnection(props)) {
-            try (Connection conn2 = getConnection(props)) {
-                String schemaName = "S_" + generateUniqueName();
-                String tableName = "TBL_" + generateUniqueName();
-                String view1Name = "VW1_" + generateUniqueName();
-                String view1IndexName1 = "VW1IDX1_" + generateUniqueName();
-                String view1IndexName2 = "VW1IDX2_" + generateUniqueName();
-                String fullView1IndexName1 = 
SchemaUtil.getTableName(schemaName, view1IndexName1);
-                String fullView1IndexName2 =  
SchemaUtil.getTableName(schemaName, view1IndexName2);
-                String view2Name = "VW2_" + generateUniqueName();
-                String view2IndexName1 = "VW2IDX1_" + generateUniqueName();
-                String fullView1Name = SchemaUtil.getTableName(schemaName, 
view1Name);
-                String fullView2Name = SchemaUtil.getTableName(schemaName, 
view2Name);
-                String fullView2IndexName1 =  
SchemaUtil.getTableName(schemaName, view2IndexName1);
-
-                HashMap<String, ArrayList<String>> expected = 
testWithViewsAndIndex_BaseTableChange(conn, conn2, schemaName, tableName, 
view1Name, view1IndexName1, view1IndexName2, view2Name, view2IndexName1);
-
-                // We have to rebuild index for this to work
-                IndexToolIT.runIndexTool(true, false, schemaName, view1Name, 
view1IndexName1);
-                IndexToolIT.runIndexTool(true, false, schemaName, view1Name, 
view1IndexName2);
-                IndexToolIT.runIndexTool(true, false, schemaName, view2Name, 
view2IndexName1);
-
-                validateIndex(conn, fullView1IndexName1, true, expected);
-                validateIndex(conn2, fullView1IndexName2, true, expected);
-
-                // Add row and check
-                populateView(conn, fullView2Name, 20, 1);
-                ResultSet rs = conn2.createStatement().executeQuery("SELECT * 
FROM " + fullView2IndexName1 + " WHERE \":PK1\"='PK20'");
-                assertEquals(true, rs.next());
-                rs = conn.createStatement().executeQuery("SELECT * FROM " + 
fullView2Name  + " WHERE PK1='PK20'");
-                assertEquals(true, rs.next());
-
-                // Drop row and check
-                conn.createStatement().execute("DELETE from " + fullView2Name 
+ " WHERE PK1='PK20'");
-                rs = conn2.createStatement().executeQuery("SELECT * FROM " + 
fullView2IndexName1 + " WHERE \":PK1\"='PK20'");
-                assertEquals(false, rs.next());
-                rs = conn.createStatement().executeQuery("SELECT * FROM " + 
fullView2Name  + " WHERE PK1='PK20'");
-                assertEquals(false, rs.next());
-
-                conn2.createStatement().execute("DROP VIEW " + fullView2Name);
-                // check that this view is dropped but the other is there
-                rs = conn.createStatement().executeQuery("SELECT * FROM " + 
fullView1Name);
-                assertEquals(true, rs.next());
-                boolean failed = true;
-                try (ResultSet rsFail = 
conn.createStatement().executeQuery("SELECT * FROM " + fullView2Name)) {
-                    rsFail.next();
-                    failed = false;
-                } catch (SQLException e){
-
-                }
-                assertEquals(true, failed);
-
-                // check that first index is there but second index is dropped
-                rs = conn2.createStatement().executeQuery("SELECT * FROM " + 
fullView1IndexName1);
-                assertEquals(true, rs.next());
-                try {
-                    rs = conn.createStatement().executeQuery("SELECT * FROM " 
+ fullView2IndexName1);
-                    rs.next();
-                    failed = false;
-                } catch (SQLException e){
-
-                }
-                assertEquals(true, failed);
-            }
-        }
-    }
-
-    @Test
-    public void testUpdatePhysicalTableNameWithViews_runScrutiny() throws 
Exception {
-        try (Connection conn = getConnection(props)) {
-            try (Connection conn2 = getConnection(props)) {
-                String schemaName = "S_" + generateUniqueName();
-                String tableName = "TBL_" + generateUniqueName();
-                String view1Name = "VW1_" + generateUniqueName();
-                String view1IndexName1 = "VW1IDX1_" + generateUniqueName();
-                String view1IndexName2 = "VW1IDX2_" + generateUniqueName();
-                String view2Name = "VW2_" + generateUniqueName();
-                String view2IndexName1 = "VW2IDX1_" + generateUniqueName();
-
-                testWithViewsAndIndex_BaseTableChange(conn, conn2,schemaName, 
tableName, view1Name,
-                        view1IndexName1, view1IndexName2, view2Name, 
view2IndexName1);
-
-                List<Job>
-                        completedJobs =
-                        IndexScrutinyToolBaseIT.runScrutinyTool(schemaName, 
view2Name, view2IndexName1, 1L,
-                                
IndexScrutinyTool.SourceTable.DATA_TABLE_SOURCE);
-
-                Job job = completedJobs.get(0);
-                assertTrue(job.isSuccessful());
-
-                Counters counters = job.getCounters();
-                if (createChildAfterTransform) {
-                    assertEquals(3, 
counters.findCounter(VALID_ROW_COUNT).getValue());
-                    assertEquals(2, 
counters.findCounter(INVALID_ROW_COUNT).getValue());
-                } else {
-                    // Since we didn't build the index, we expect 1 missing 
index row and 2 are from the other index
-                    assertEquals(2, 
counters.findCounter(VALID_ROW_COUNT).getValue());
-                    assertEquals(3, 
counters.findCounter(INVALID_ROW_COUNT).getValue());
-                }
-
-            }
-        }
-    }
-
-    private void createTable(Connection conn, String tableName) throws 
Exception {
+    protected void createTable(Connection conn, String tableName) throws 
Exception {
         String createTableSql = "CREATE TABLE " + tableName + " (PK1 VARCHAR 
NOT NULL, V1 VARCHAR, V2 INTEGER, V3 INTEGER "
                 + "CONSTRAINT NAME_PK PRIMARY KEY(PK1)) COLUMN_ENCODED_BYTES=0 
" + dataTableDdl;
-        LOGGER.debug(createTableSql);
         conn.createStatement().execute(createTableSql);
     }
 
-    private void createIndexOnTable(Connection conn, String tableName, String 
indexName)
+    protected void createIndexOnTable(Connection conn, String tableName, 
String indexName, boolean isLocal)
             throws SQLException {
-        String createIndexSql = "CREATE INDEX " + indexName + " ON " + 
tableName + " (V1) INCLUDE (V2, V3) ";
-        LOGGER.debug(createIndexSql);
+        String createIndexSql = "CREATE " + (isLocal? " LOCAL ":"") + " INDEX 
" + indexName + " ON " + tableName + " (V1) INCLUDE (V2, V3) ";
         conn.createStatement().execute(createIndexSql);
     }
 
-    private void dropIndex(Connection conn, String tableName, String indexName)
+    protected void createIndexOnTable(Connection conn, String tableName, 
String indexName)
+            throws SQLException {
+        createIndexOnTable(conn, tableName, indexName, false);
+    }
+
+    protected void dropIndex(Connection conn, String tableName, String 
indexName)
             throws SQLException {
         String sql = "DROP INDEX " + indexName + " ON " + tableName ;
         conn.createStatement().execute(sql);
     }
 
-    private HashMap<String, ArrayList<String>> populateTable(Connection conn, 
String tableName, int startnum, int numOfRows)
+    protected HashMap<String, ArrayList<String>> populateTable(Connection 
conn, String tableName, int startnum, int numOfRows)
             throws SQLException {
         String upsert = "UPSERT INTO " + tableName + " (PK1, V1,  V2, V3) 
VALUES (?,?,?,?)";
         PreparedStatement upsertStmt = conn.prepareStatement(upsert);
@@ -717,7 +450,7 @@ public class LogicalTableNameIT extends BaseTest {
         return result;
     }
 
-    private HashMap<String, ArrayList<String>> populateView(Connection conn, 
String viewName, int startNum, int numOfRows) throws SQLException {
+    protected HashMap<String, ArrayList<String>> populateView(Connection conn, 
String viewName, int startNum, int numOfRows) throws SQLException {
         String upsert = "UPSERT INTO " + viewName + " (PK1, V1,  V2, V3, 
VIEW_COL1, VIEW_COL2) VALUES (?,?,?,?,?,?)";
         PreparedStatement upsertStmt = conn.prepareStatement(upsert);
         HashMap<String, ArrayList<String>> result = new HashMap<>();
@@ -741,7 +474,7 @@ public class LogicalTableNameIT extends BaseTest {
         return result;
     }
 
-    private void createViewAndIndex(Connection conn, String schemaName, String 
tableName, String viewName, String viewIndexName)
+    protected void createViewAndIndex(Connection conn, String schemaName, 
String tableName, String viewName, String viewIndexName)
             throws SQLException {
         String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
         String fullViewName = SchemaUtil.getTableName(schemaName, viewName);
@@ -755,7 +488,7 @@ public class LogicalTableNameIT extends BaseTest {
         conn.commit();
     }
 
-    private void validateTable(Connection connection, String tableName) throws 
SQLException {
+    protected void validateTable(Connection connection, String tableName) 
throws SQLException {
         String selectTable = "SELECT PK1, V1, V2, V3 FROM " + tableName + " 
ORDER BY PK1 DESC";
         ResultSet rs = connection.createStatement().executeQuery(selectTable);
         assertTrue(rs.next());
@@ -775,7 +508,7 @@ public class LogicalTableNameIT extends BaseTest {
         assertEquals(2, rs.getInt(4));
     }
 
-    private void validateIndex(Connection connection, String tableName, 
boolean isViewIndex, HashMap<String, ArrayList<String>> expected) throws 
SQLException {
+    protected void validateIndex(Connection connection, String tableName, 
boolean isViewIndex, HashMap<String, ArrayList<String>> expected) throws 
SQLException {
         String selectTable = "SELECT * FROM " + tableName;
         ResultSet rs = connection.createStatement().executeQuery(selectTable);
         int cnt = 0;
@@ -794,7 +527,7 @@ public class LogicalTableNameIT extends BaseTest {
         assertEquals(cnt, expected.size());
     }
 
-    public static void renameAndDropPhysicalTable(Connection conn, String tenantId, String schema, String tableName, String physicalName) throws Exception {
+    public static void renameAndDropPhysicalTable(Connection conn, String tenantId, String schema, String tableName, String physicalName, boolean isNamespaceEnabled) throws Exception {
         String
                 changeName =
                 String.format(
@@ -804,6 +537,9 @@ public class LogicalTableNameIT extends BaseTest {
         conn.commit();
 
         String fullTableName = SchemaUtil.getTableName(schema, tableName);
+        if (isNamespaceEnabled) {
+            fullTableName = schema + NAMESPACE_SEPARATOR + tableName;
+        }
         Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
         TableName hTableName = TableName.valueOf(fullTableName);
         admin.disableTable(hTableName);
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameExtendedIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameExtendedIT.java
new file mode 100644
index 0000000..cb58962
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameExtendedIT.java
@@ -0,0 +1,276 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.hbase.index.IndexRegionObserver;
+import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.schema.types.PInteger;
+import org.apache.phoenix.util.ByteUtil;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.SchemaUtil;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Properties;
+
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+
+@Category(NeedsOwnMiniClusterTest.class)
+public class LogicalTableNameExtendedIT extends LogicalTableNameBaseIT {
+    private Properties propsNamespace = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+
+    @BeforeClass
+    public static synchronized void doSetup() throws Exception {
+        initCluster(true);
+    }
+
+    public LogicalTableNameExtendedIT()  {
+        propsNamespace.setProperty(QueryServices.IS_NAMESPACE_MAPPING_ENABLED, Boolean.toString(true));
+    }
+
+    @Test
+    public void testUpdatePhysicalTableName_namespaceMapped() throws Exception 
{
+        String schemaName = "S_" + generateUniqueName();
+        String tableName = "TBL_" + generateUniqueName();
+        String view1Name = "VW1_" + generateUniqueName();
+        String view1IndexName1 = "VW1IDX1_" + generateUniqueName();
+        String view1IndexName2 = "VW1IDX2_" + generateUniqueName();
+        String view2Name = "VW2_" + generateUniqueName();
+        String view2IndexName1 = "VW2IDX1_" + generateUniqueName();
+
+         try (Connection conn = getConnection(propsNamespace)) {
+            try (Connection conn2 = getConnection(propsNamespace)) {
+                conn.createStatement().execute("CREATE SCHEMA IF NOT EXISTS " 
+ schemaName);
+                testWithViewsAndIndex_BaseTableChange(conn, conn2, null, 
schemaName, tableName, view1Name,
+                        view1IndexName1, view1IndexName2, view2Name, 
view2IndexName1, true, false);
+
+                populateView(conn, (schemaName+"."+view2Name), 10, 1);
+                ResultSet rs = conn2.createStatement().executeQuery("SELECT * 
FROM " + (schemaName + "." + view2IndexName1) + " WHERE \":PK1\"='PK10'");
+                assertEquals(true, rs.next());
+
+            }
+        }
+    }
+
+    private void test_bothTableAndIndexHaveDifferentNames(Connection conn, Connection conn2, String schemaName, String tableName, String indexName) throws Exception {
+        String fullTableHName = schemaName + ":" + tableName;
+        String fullIndexHName = schemaName + ":" + indexName;
+
+        conn.createStatement().execute("CREATE SCHEMA IF NOT EXISTS " + 
schemaName);
+        // Create tables and change physical index table
+        test_IndexTableChange(conn, conn2, schemaName, tableName, indexName,
+                IndexRegionObserver.UNVERIFIED_BYTES, true);
+        // Now change physical data table
+        createAndPointToNewPhysicalTable(conn, fullTableHName, true);
+        try (HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices()
+                .getAdmin()) {
+            assertEquals(false, 
admin.tableExists(TableName.valueOf(fullTableHName)));
+            assertEquals(false, 
admin.tableExists(TableName.valueOf(fullIndexHName)));
+        }
+    }
+
+    @Test
+    public void 
testUpdatePhysicalTableName_bothTableAndIndexHaveDifferentNames() throws 
Exception {
+        String schemaName = "S_" + generateUniqueName();
+        String tableName = "TBL_" + generateUniqueName();
+        String indexName = "IDX_" + generateUniqueName();
+        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
+        String fullTableHName = schemaName + ":" + tableName;
+        String fullIndexName = SchemaUtil.getTableName(schemaName, indexName);
+        String fullIndexHName = schemaName + ":" + indexName;
+        String fullNewTableHName = schemaName + ":NEW_TBL_" + tableName;
+        try (Connection conn = getConnection(propsNamespace)) {
+            try (Connection conn2 = getConnection(propsNamespace)) {
+                test_bothTableAndIndexHaveDifferentNames(conn, conn2, 
schemaName, tableName, indexName);
+                try (HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices()
+                        .getAdmin()) {
+                    conn2.setAutoCommit(true);
+                    // Add row and check
+                    populateTable(conn2, fullTableName, 10, 1);
+                    ResultSet
+                            rs =
+                            conn2.createStatement().executeQuery("SELECT * 
FROM " + fullIndexName + " WHERE \":PK1\"='PK10'");
+                    assertEquals(true, rs.next());
+                    rs = conn.createStatement().executeQuery("SELECT * FROM " 
+ fullTableName + " WHERE PK1='PK10'");
+                    assertEquals(true, rs.next());
+
+                    // Drop row and check
+                    conn.createStatement().execute("DELETE from " + 
fullTableName + " WHERE PK1='PK10'");
+                    rs = conn2.createStatement().executeQuery("SELECT * FROM " 
+ fullIndexName + " WHERE \":PK1\"='PK10'");
+                    assertEquals(false, rs.next());
+                    rs = conn.createStatement().executeQuery("SELECT * FROM " 
+ fullTableName + " WHERE PK1='PK10'");
+                    assertEquals(false, rs.next());
+
+                    // Add a row and run IndexTool to check that the row is 
there on the other side
+                    rs = conn.createStatement().executeQuery("SELECT * FROM " 
+  fullIndexName + " WHERE \":PK1\"='PK30'");
+                    assertEquals(false, rs.next());
+                    try (HTableInterface htable = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(Bytes.toBytes(fullNewTableHName)))
 {
+                        Put put = new 
Put(ByteUtil.concat(Bytes.toBytes("PK30")));
+                        
put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, 
QueryConstants.EMPTY_COLUMN_BYTES,
+                                QueryConstants.EMPTY_COLUMN_VALUE_BYTES);
+                        
put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("V1"),
+                                Bytes.toBytes("V30"));
+                        
put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("V2"),
+                                PInteger.INSTANCE.toBytes(32));
+                        
put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("V3"),
+                                PInteger.INSTANCE.toBytes(33));
+                        htable.put(put);
+                    }
+
+                    IndexToolIT.runIndexTool(true, false, schemaName, 
tableName, indexName);
+                    rs = conn.createStatement().executeQuery("SELECT * FROM " 
+  fullIndexName + " WHERE \":PK1\"='PK30'");
+                    assertEquals(true, rs.next());
+
+                    // Drop tables
+                    conn2.createStatement().execute("DROP TABLE " + 
fullTableName);
+                    // check that the physical data table is dropped
+                    assertEquals(false, 
admin.tableExists(TableName.valueOf(fullNewTableHName)));
+
+                    // check that index is dropped
+                    assertEquals(false, 
admin.tableExists(TableName.valueOf((schemaName + ":NEW_IDXTBL_" + 
indexName))));
+                }
+            }
+        }
+    }
+
+    @Test
+    public void testUpdatePhysicalTableName_alterTable() throws Exception {
+        String schemaName = "S_" + generateUniqueName();
+        String tableName = "TBL_" + generateUniqueName();
+        String indexName = "IDX_" + generateUniqueName();
+        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
+        String fullTableHName = schemaName + ":" + tableName;
+        String fullIndexName = SchemaUtil.getTableName(schemaName, indexName);
+        String fullIndexHName = schemaName + ":" + indexName;
+        String fullNewTableHName = schemaName + ":NEW_TBL_" + tableName;
+        try (Connection conn = getConnection(propsNamespace)) {
+            try (Connection conn2 = getConnection(propsNamespace)) {
+                test_bothTableAndIndexHaveDifferentNames(conn, conn2, 
schemaName, tableName, indexName);
+                conn2.setAutoCommit(true);
+
+                conn2.createStatement().execute("ALTER TABLE " + fullTableName 
+ " ADD new_column_1 VARCHAR(64) CASCADE INDEX ALL");
+                conn2.createStatement().execute("UPSERT INTO " + fullTableName 
+ " (PK1, V1, new_column_1) VALUES ('a', 'v1', 'new_col_val')");
+                ResultSet
+                        rs =
+                        conn2.createStatement().executeQuery("SELECT 
\"0:NEW_COLUMN_1\" FROM " + fullIndexName);
+                assertEquals(true, rs.next());
+                rs = conn.createStatement().executeQuery("SELECT NEW_COLUMN_1 
FROM " + fullTableName + " WHERE NEW_COLUMN_1 IS NOT NULL");
+                assertEquals(true, rs.next());
+                assertEquals(false, rs.next());
+
+                // Drop the column and check that no exceptions are thrown
+                conn.createStatement().execute("ALTER TABLE " + fullTableName + " DROP COLUMN NEW_COLUMN_1");
+            }
+        }
+    }
+
+    @Test
+    public void testUpdatePhysicalTableName_tenantViews() throws Exception {
+
+        try (Connection conn = getConnection(propsNamespace)) {
+            conn.createStatement().execute("CREATE SCHEMA IF NOT EXISTS 
TEST_ENTITY");
+        }
+        testGlobalViewAndTenantView(false, true);
+        testGlobalViewAndTenantView(true, true);
+    }
+
+    @Test
+    public void testUpdatePhysicalTableName_localIndex() throws Exception {
+        String schemaName = "S_" + generateUniqueName();
+        String tableName = "TBL_" + generateUniqueName();
+        String indexName = "LCL_IDX_" + generateUniqueName();
+        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
+        String fullIndexName = SchemaUtil.getTableName(schemaName, indexName);
+        String fullHTableName = schemaName + ":" + tableName;
+
+        try (Connection conn = getConnection(propsNamespace)) {
+            conn.setAutoCommit(true);
+            conn.createStatement().execute("CREATE SCHEMA IF NOT EXISTS " + 
schemaName);
+            createTable(conn, fullTableName);
+
+            createIndexOnTable(conn, fullTableName, indexName, true);
+            HashMap<String, ArrayList<String>> expected = populateTable(conn, 
fullTableName, 1, 2);
+            createAndPointToNewPhysicalTable(conn, fullHTableName, true);
+
+            String select = "SELECT * FROM " + fullIndexName;
+            ResultSet rs = conn.createStatement().executeQuery( select);
+            assertEquals(true, rs.next());
+            validateIndex(conn, fullIndexName,false, expected);
+
+            // Drop and recreate
+            conn.createStatement().execute("DROP INDEX " + indexName + " ON " 
+ fullTableName);
+            createIndexOnTable(conn, fullTableName, indexName, true);
+            rs = conn.createStatement().executeQuery(select);
+            assertEquals(true, rs.next());
+            validateIndex(conn, fullIndexName,false, expected);
+        }
+    }
+
+    @Test
+    public void testUpdatePhysicalTableName_viewIndexSequence() throws 
Exception {
+        String schemaName = "S_" + generateUniqueName();
+        String tableName = "TBL_" + generateUniqueName();
+        String viewName = "VW1_" + generateUniqueName();
+        String viewIndexName1 = "VWIDX1_" + generateUniqueName();
+        String viewIndexName2 = "VWIDX2_" + generateUniqueName();
+        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
+        String fullViewName = SchemaUtil.getTableName(schemaName, viewName);
+        String fullViewIndex1Name = SchemaUtil.getTableName(schemaName, 
viewIndexName1);
+        String fullViewIndex2Name = SchemaUtil.getTableName(schemaName, 
viewIndexName2);
+        String fullTableHName = schemaName + ":" + tableName;
+        try (Connection conn = getConnection(propsNamespace)) {
+            conn.setAutoCommit(true);
+            conn.createStatement().execute("CREATE SCHEMA IF NOT EXISTS " + 
schemaName);
+            createTable(conn, fullTableName);
+            createViewAndIndex(conn, schemaName, tableName, viewName, 
viewIndexName1);
+            HashMap<String, ArrayList<String>> expected = populateView(conn, 
fullViewName, 1, 1);
+            createAndPointToNewPhysicalTable(conn, fullTableHName, true);
+            validateIndex(conn, fullViewIndex1Name,true, expected);
+            String indexDDL = "CREATE INDEX IF NOT EXISTS " + viewIndexName2 + 
" ON " + fullViewName + " (VIEW_COL1) include (VIEW_COL2) ";
+            conn.createStatement().execute(indexDDL);
+            ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM 
" + fullViewIndex2Name);
+            assertEquals(true, rs.next());
+            assertEquals("VIEW_COL1_1", rs.getString(1));
+            assertEquals("PK1", rs.getString(2));
+            assertEquals("VIEW_COL2_1", rs.getString(3));
+            assertEquals(false, rs.next());
+            expected.putAll(populateView(conn, fullViewName, 10, 1));
+
+            validateIndex(conn, fullViewIndex1Name, true, expected);
+            rs = conn.createStatement().executeQuery("SELECT * FROM " + fullViewIndex2Name + " WHERE \"0:VIEW_COL1\"='VIEW_COL1_10'");
+            assertEquals(true, rs.next());
+            assertEquals("VIEW_COL1_10", rs.getString(1));
+            assertEquals("PK10", rs.getString(2));
+            assertEquals("VIEW_COL2_10", rs.getString(3));
+            assertEquals(false, rs.next());
+        }
+    }
+}
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameIT.java
index 15388a8..c4f3c2e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LogicalTableNameIT.java
@@ -17,34 +17,18 @@
  */
 package org.apache.phoenix.end2end;
 
-import org.apache.curator.shaded.com.google.common.base.Joiner;
 import org.apache.curator.shaded.com.google.common.collect.Lists;
-import org.apache.curator.shaded.com.google.common.collect.Maps;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.regionserver.ScanInfoUtil;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.phoenix.end2end.index.SingleCellIndexIT;
+import org.apache.phoenix.end2end.join.HashJoinGlobalIndexIT;
 import org.apache.phoenix.hbase.index.IndexRegionObserver;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.mapreduce.index.IndexScrutinyTool;
-import org.apache.phoenix.query.BaseTest;
-import org.apache.phoenix.query.PhoenixTestBuilder;
-import org.apache.phoenix.query.QueryConstants;
-import org.apache.phoenix.query.QueryServices;
-import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.types.PInteger;
-import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.QueryUtil;
-import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
-import org.apache.phoenix.util.StringUtil;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -54,7 +38,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.sql.Connection;
-import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
@@ -62,42 +45,32 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 import java.util.Properties;
-import java.util.Random;
 
-import static java.util.Arrays.asList;
 import static org.apache.phoenix.mapreduce.index.PhoenixScrutinyJobCounters.INVALID_ROW_COUNT;
 import static org.apache.phoenix.mapreduce.index.PhoenixScrutinyJobCounters.VALID_ROW_COUNT;
-import static org.apache.phoenix.query.PhoenixTestBuilder.DDLDefaults.MAX_ROWS;
-import static org.apache.phoenix.util.PhoenixRuntime.TENANT_ID_ATTRIB;
+import static org.apache.phoenix.util.MetaDataUtil.VIEW_INDEX_TABLE_PREFIX;
 import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
 
 @RunWith(Parameterized.class)
 @Category(NeedsOwnMiniClusterTest.class)
-public class LogicalTableNameIT extends BaseTest {
+public class LogicalTableNameIT extends LogicalTableNameBaseIT {
     private static final Logger LOGGER = LoggerFactory.getLogger(LogicalTableNameIT.class);
 
-    private final boolean createChildAfterTransform;
-    private final boolean immutable;
-    private String dataTableDdl;
-    public static final String NEW_TABLE_PREFIX = "NEW_TBL_";
+    protected boolean createChildAfterRename;
+    private boolean immutable;
     private Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
 
     @BeforeClass
     public static synchronized void doSetup() throws Exception {
-        Map<String, String> props = Maps.newConcurrentMap();
-        props.put(QueryServices.DROP_METADATA_ATTRIB, Boolean.TRUE.toString());
-        props.put(ScanInfoUtil.PHOENIX_MAX_LOOKBACK_AGE_CONF_KEY, Integer.toString(60*60*1000)); // An hour
-        setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
+       initCluster(false);
     }
 
-    public LogicalTableNameIT(boolean createChildAfterTransform, boolean immutable)  {
-        this.createChildAfterTransform = createChildAfterTransform;
+    public LogicalTableNameIT(boolean createChildAfterRename, boolean immutable)  {
+        this.createChildAfterRename = createChildAfterRename;
         this.immutable = immutable;
         StringBuilder optionBuilder = new StringBuilder();
         if (immutable) {
@@ -107,7 +80,7 @@ public class LogicalTableNameIT extends BaseTest {
     }
 
     @Parameterized.Parameters(
-            name = "createChildAfterTransform={0}, immutable={1}")
+            name = "createChildAfterRename={0}, immutable={1}")
     public static synchronized Collection<Object[]> data() {
         List<Object[]> list = Lists.newArrayListWithExpectedSize(2);
         boolean[] Booleans = new boolean[] { false, true };
@@ -120,63 +93,6 @@ public class LogicalTableNameIT extends BaseTest {
         return list;
     }
 
-    private Connection getConnection(Properties props) throws Exception {
-        props.setProperty(QueryServices.DROP_METADATA_ATTRIB, Boolean.toString(true));
-        // Force real driver to be used as the test one doesn't handle creating
-        // more than one ConnectionQueryService
-        props.setProperty(QueryServices.EXTRA_JDBC_ARGUMENTS_ATTRIB, StringUtil.EMPTY_STRING);
-        // Create new ConnectionQueryServices so that we can set DROP_METADATA_ATTRIB
-        String url = QueryUtil.getConnectionUrl(props, config, "PRINCIPAL");
-        return DriverManager.getConnection(url, props);
-    }
-
-    private  HashMap<String, ArrayList<String>> testBaseTableWithIndex_BaseTableChange(Connection conn, Connection conn2, String schemaName,
-                                                                                       String tableName, String indexName) throws Exception {
-        conn.setAutoCommit(true);
-        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
-        createTable(conn, fullTableName);
-        if (!createChildAfterTransform) {
-            createIndexOnTable(conn, fullTableName, indexName);
-        }
-        HashMap<String, ArrayList<String>> expected = populateTable(conn, fullTableName, 1, 2);
-
-        // Create another hbase table and add 1 more row
-        String newTableName =  NEW_TABLE_PREFIX + tableName;
-        String fullNewTableName = SchemaUtil.getTableName(schemaName, newTableName);
-        try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-            String snapshotName = new StringBuilder(fullTableName).append("-Snapshot").toString();
-            admin.snapshot(snapshotName, TableName.valueOf(fullTableName));
-            admin.cloneSnapshot(Bytes.toBytes(snapshotName), Bytes.toBytes(fullNewTableName));
-
-            try (HTableInterface htable = conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(Bytes.toBytes(fullNewTableName))) {
-                Put put = new Put(ByteUtil.concat(Bytes.toBytes("PK3")));
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_BYTES,
-                        QueryConstants.EMPTY_COLUMN_VALUE_BYTES);
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("V1"), Bytes.toBytes("V13"));
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("V2"),
-                        PInteger.INSTANCE.toBytes(3));
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("V3"),
-                        PInteger.INSTANCE.toBytes(4));
-                htable.put(put);
-                expected.put("PK3", Lists.newArrayList("PK3", "V13", "3", "4"));
-            }
-        }
-
-        // Query to cache on the second connection
-        String selectTable1 = "SELECT PK1, V1, V2, V3 FROM " + fullTableName + " ORDER BY PK1 DESC";
-        ResultSet rs1 = conn2.createStatement().executeQuery(selectTable1);
-        assertTrue(rs1.next());
-
-        // Rename table to point to the new hbase table
-        renameAndDropPhysicalTable(conn, "NULL", schemaName, tableName, newTableName);
-
-        if (createChildAfterTransform) {
-            createIndexOnTable(conn, fullTableName, indexName);
-        }
-
-        return expected;
-    }
-
     @Test
     public void testUpdatePhysicalTableNameWithIndex() throws Exception {
         String schemaName = "S_" + generateUniqueName();
@@ -187,7 +103,8 @@ public class LogicalTableNameIT extends BaseTest {
 
         try (Connection conn = getConnection(props)) {
             try (Connection conn2 = getConnection(props)) {
-                HashMap<String, ArrayList<String>> expected = testBaseTableWithIndex_BaseTableChange(conn, conn2, schemaName, tableName, indexName);
+                HashMap<String, ArrayList<String>> expected = testBaseTableWithIndex_BaseTableChange(conn, conn2, schemaName, tableName,
+                        indexName, false, createChildAfterRename);
 
                 // We have to rebuild index for this to work
                 IndexToolIT.runIndexTool(true, false, schemaName, tableName, indexName);
@@ -213,12 +130,12 @@ public class LogicalTableNameIT extends BaseTest {
 
                 conn2.createStatement().execute("DROP TABLE " + fullTableName);
                 // check that the physical data table is dropped
-                Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
-                assertEquals(false, admin.tableExists(TableName.valueOf(SchemaUtil.getTableName(schemaName,NEW_TABLE_PREFIX + tableName))));
-
-                // check that index is dropped
-                assertEquals(false, admin.tableExists(TableName.valueOf(fullIndexName)));
+                try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+                    assertEquals(false, admin.tableExists(TableName.valueOf(SchemaUtil.getTableName(schemaName, NEW_TABLE_PREFIX + tableName))));
 
+                    // check that index is dropped
+                    assertEquals(false, admin.tableExists(TableName.valueOf(fullIndexName)));
+                }
             }
         }
     }
@@ -231,7 +148,7 @@ public class LogicalTableNameIT extends BaseTest {
 
         try (Connection conn = getConnection(props)) {
             try (Connection conn2 = getConnection(props)) {
-                testBaseTableWithIndex_BaseTableChange(conn, conn2, schemaName, tableName, indexName);
+                testBaseTableWithIndex_BaseTableChange(conn, conn2, schemaName, tableName, indexName, false, createChildAfterRename);
 
                 List<Job>
                         completedJobs =
@@ -242,7 +159,7 @@ public class LogicalTableNameIT extends BaseTest {
                 assertTrue(job.isSuccessful());
 
                 Counters counters = job.getCounters();
-                if (createChildAfterTransform) {
+                if (createChildAfterRename) {
                     assertEquals(3, counters.findCounter(VALID_ROW_COUNT).getValue());
                     assertEquals(0, counters.findCounter(INVALID_ROW_COUNT).getValue());
                 } else {
@@ -254,49 +171,6 @@ public class LogicalTableNameIT extends BaseTest {
         }
     }
 
-    private  HashMap<String, ArrayList<String>> test_IndexTableChange(Connection conn, Connection conn2, String schemaName, String tableName, String indexName, byte[] verifiedBytes) throws Exception {
-        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
-        String fullIndexName = SchemaUtil.getTableName(schemaName, indexName);
-        conn.setAutoCommit(true);
-        createTable(conn, fullTableName);
-        createIndexOnTable(conn, fullTableName, indexName);
-        HashMap<String, ArrayList<String>> expected = populateTable(conn, fullTableName, 1, 2);
-
-        // Create another hbase table for index and add 1 more row
-        String newTableName = "NEW_IDXTBL_" + generateUniqueName();
-        String fullNewTableName = SchemaUtil.getTableName(schemaName, newTableName);
-        try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices()
-                .getAdmin()) {
-            String snapshotName = new StringBuilder(indexName).append("-Snapshot").toString();
-            admin.snapshot(snapshotName, TableName.valueOf(fullIndexName));
-            admin.cloneSnapshot(Bytes.toBytes(snapshotName), Bytes.toBytes(fullNewTableName));
-
-            try (HTableInterface htable = conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(Bytes.toBytes(fullNewTableName))) {
-                Put
-                        put =
-                        new Put(ByteUtil.concat(Bytes.toBytes("V13"), QueryConstants.SEPARATOR_BYTE_ARRAY, Bytes.toBytes("PK3")));
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_BYTES,
-                        verifiedBytes);
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("0:V2"),
-                        PInteger.INSTANCE.toBytes(3));
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("0:V3"),
-                        PInteger.INSTANCE.toBytes(4));
-                htable.put(put);
-                expected.put("PK3", Lists.newArrayList("PK3", "V13", "3", "4"));
-            }
-        }
-
-        // Query to cache on the second connection
-        String selectTable1 = "SELECT * FROM " + fullIndexName;
-        ResultSet rs1 = conn2.createStatement().executeQuery(selectTable1);
-        assertTrue(rs1.next());
-
-        // Rename table to point to the new hbase table
-        renameAndDropPhysicalTable(conn, "NULL", schemaName, indexName, newTableName);
-
-        return expected;
-    }
-
     @Test
     public void testUpdatePhysicalIndexTableName() throws Exception {
         String schemaName = "S_" + generateUniqueName();
@@ -306,7 +180,7 @@ public class LogicalTableNameIT extends BaseTest {
         String fullIndexName = SchemaUtil.getTableName(schemaName, indexName);
         try (Connection conn = getConnection(props)) {
             try (Connection conn2 = getConnection(props)) {
-                HashMap<String, ArrayList<String>> expected = test_IndexTableChange(conn, conn2, schemaName, tableName, indexName, IndexRegionObserver.VERIFIED_BYTES);
+                HashMap<String, ArrayList<String>> expected = test_IndexTableChange(conn, conn2, schemaName, tableName, indexName, IndexRegionObserver.VERIFIED_BYTES, false);
 
                 validateIndex(conn, fullIndexName, false, expected);
                 validateIndex(conn2, fullIndexName, false, expected);
@@ -314,11 +188,11 @@ public class LogicalTableNameIT extends BaseTest {
                 // create another index and drop the first index and validate the second one
                 String indexName2 = "IDX2_" + generateUniqueName();
                 String fullIndexName2 = SchemaUtil.getTableName(schemaName, indexName2);
-                if (createChildAfterTransform) {
+                if (createChildAfterRename) {
                     createIndexOnTable(conn2, fullTableName, indexName2);
                 }
                 dropIndex(conn2, fullTableName, indexName);
-                if (!createChildAfterTransform) {
+                if (!createChildAfterRename) {
                     createIndexOnTable(conn2, fullTableName, indexName2);
                 }
                 // The new index doesn't have the new row
@@ -338,7 +212,7 @@ public class LogicalTableNameIT extends BaseTest {
         String fullIndexName = SchemaUtil.getTableName(schemaName, indexName);
         try (Connection conn = getConnection(props)) {
             try (Connection conn2 = getConnection(props)) {
-                test_IndexTableChange(conn, conn2, schemaName, tableName, indexName, IndexRegionObserver.VERIFIED_BYTES);
+                test_IndexTableChange(conn, conn2, schemaName, tableName, indexName, IndexRegionObserver.VERIFIED_BYTES, false);
                 List<Job>
                         completedJobs =
                         IndexScrutinyToolBaseIT.runScrutinyTool(schemaName, tableName, indexName, 1L,
@@ -356,7 +230,7 @@ public class LogicalTableNameIT extends BaseTest {
                 // Try with unverified bytes
                 String tableName2 = "TBL_" + generateUniqueName();
                 String indexName2 = "IDX_" + generateUniqueName();
-                test_IndexTableChange(conn, conn2, schemaName, tableName2, indexName2, IndexRegionObserver.UNVERIFIED_BYTES);
+                test_IndexTableChange(conn, conn2, schemaName, tableName2, indexName2, IndexRegionObserver.UNVERIFIED_BYTES, false);
 
                 completedJobs =
                         IndexScrutinyToolBaseIT.runScrutinyTool(schemaName, tableName2, indexName2, 1L,
@@ -375,201 +249,6 @@ public class LogicalTableNameIT extends BaseTest {
         }
     }
 
-    private HashMap<String, ArrayList<String>> testWithViewsAndIndex_BaseTableChange(Connection conn, Connection conn2, String schemaName, String tableName, String viewName1, String v1_indexName1, String v1_indexName2, String viewName2, String v2_indexName1) throws Exception {
-        conn.setAutoCommit(true);
-        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
-        String fullViewName1 = SchemaUtil.getTableName(schemaName, viewName1);
-        String fullViewName2 = SchemaUtil.getTableName(schemaName, viewName2);
-        createTable(conn, fullTableName);
-        HashMap<String, ArrayList<String>> expected = new HashMap<>();
-        if (!createChildAfterTransform) {
-            createViewAndIndex(conn, schemaName, tableName, viewName1, v1_indexName1);
-            createViewAndIndex(conn, schemaName, tableName, viewName1, v1_indexName2);
-            createViewAndIndex(conn, schemaName, tableName, viewName2, v2_indexName1);
-            expected.putAll(populateView(conn, fullViewName1, 1,2));
-            expected.putAll(populateView(conn, fullViewName2, 10,2));
-        }
-
-        // Create another hbase table and add 1 more row
-        String newTableName = "NEW_TBL_" + generateUniqueName();
-        String fullNewTableName = SchemaUtil.getTableName(schemaName, newTableName);
-        try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices()
-                .getAdmin()) {
-            String snapshotName = new StringBuilder(fullTableName).append("-Snapshot").toString();
-            admin.snapshot(snapshotName, TableName.valueOf(fullTableName));
-            admin.cloneSnapshot(Bytes.toBytes(snapshotName), Bytes.toBytes(fullNewTableName));
-
-            try (HTableInterface htable = conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(Bytes.toBytes(fullNewTableName))) {
-                Put put = new Put(ByteUtil.concat(Bytes.toBytes("PK3")));
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_BYTES,
-                        QueryConstants.EMPTY_COLUMN_VALUE_BYTES);
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("V1"),
-                        Bytes.toBytes("V13"));
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("V2"),
-                        PInteger.INSTANCE.toBytes(3));
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("V3"),
-                        PInteger.INSTANCE.toBytes(4));
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("VIEW_COL1"),
-                        Bytes.toBytes("VIEW_COL1_3"));
-                put.addColumn(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, Bytes.toBytes("VIEW_COL2"),
-                        Bytes.toBytes("VIEW_COL2_3"));
-                htable.put(put);
-                expected.put("PK3", Lists.newArrayList("PK3", "V13", "3", "4", "VIEW_COL1_3", "VIEW_COL2_3"));
-            }
-        }
-
-        // Query to cache on the second connection
-        String selectTable1 = "SELECT PK1, V1, V2, V3 FROM " + fullTableName + " ORDER BY PK1 DESC";
-        ResultSet rs1 = conn2.createStatement().executeQuery(selectTable1);
-        if (!createChildAfterTransform) {
-            assertTrue(rs1.next());
-        }
-
-        // Rename table to point to hbase table
-        renameAndDropPhysicalTable(conn, "NULL", schemaName, tableName, newTableName);
-
-        conn.unwrap(PhoenixConnection.class).getQueryServices().clearCache();
-        if (createChildAfterTransform) {
-            createViewAndIndex(conn, schemaName, tableName, viewName1, v1_indexName1);
-            createViewAndIndex(conn, schemaName, tableName, viewName1, v1_indexName2);
-            createViewAndIndex(conn, schemaName, tableName, viewName2, v2_indexName1);
-            expected.putAll(populateView(conn, fullViewName1, 1,2));
-            expected.putAll(populateView(conn, fullViewName2, 10,2));
-        }
-
-        return expected;
-    }
-
-
-    private PhoenixTestBuilder.SchemaBuilder createGlobalViewAndTenantView() throws Exception {
-        int numOfRows = 5;
-        PhoenixTestBuilder.SchemaBuilder.TableOptions tableOptions = PhoenixTestBuilder.SchemaBuilder.TableOptions.withDefaults();
-        tableOptions.getTableColumns().clear();
-        tableOptions.getTableColumnTypes().clear();
-        tableOptions.setTableProps(" MULTI_TENANT=true, COLUMN_ENCODED_BYTES=0 "+this.dataTableDdl);
-
-        PhoenixTestBuilder.SchemaBuilder.GlobalViewOptions globalViewOptions = PhoenixTestBuilder.SchemaBuilder.GlobalViewOptions.withDefaults();
-
-        PhoenixTestBuilder.SchemaBuilder.GlobalViewIndexOptions globalViewIndexOptions =
-                PhoenixTestBuilder.SchemaBuilder.GlobalViewIndexOptions.withDefaults();
-        globalViewIndexOptions.setLocal(false);
-
-        PhoenixTestBuilder.SchemaBuilder.TenantViewOptions tenantViewOptions = new PhoenixTestBuilder.SchemaBuilder.TenantViewOptions();
-        tenantViewOptions.setTenantViewColumns(asList("ZID", "COL7", "COL8", "COL9"));
-        tenantViewOptions.setTenantViewColumnTypes(asList("CHAR(15)", "VARCHAR", "VARCHAR", "VARCHAR"));
-
-        PhoenixTestBuilder.SchemaBuilder.OtherOptions testCaseWhenAllCFMatchAndAllDefault = new PhoenixTestBuilder.SchemaBuilder.OtherOptions();
-        testCaseWhenAllCFMatchAndAllDefault.setTestName("testCaseWhenAllCFMatchAndAllDefault");
-        testCaseWhenAllCFMatchAndAllDefault
-                .setTableCFs(Lists.newArrayList((String) null, null, null));
-        testCaseWhenAllCFMatchAndAllDefault
-                .setGlobalViewCFs(Lists.newArrayList((String) null, null, null));
-        testCaseWhenAllCFMatchAndAllDefault
-                .setTenantViewCFs(Lists.newArrayList((String) null, null, null, null));
-
-        // Define the test schema.
-        PhoenixTestBuilder.SchemaBuilder schemaBuilder = null;
-        if (!createChildAfterTransform) {
-            schemaBuilder = new PhoenixTestBuilder.SchemaBuilder(getUrl());
-            schemaBuilder.withTableOptions(tableOptions).withGlobalViewOptions(globalViewOptions)
-                    .withGlobalViewIndexOptions(globalViewIndexOptions)
-                    .withTenantViewOptions(tenantViewOptions)
-                    .withOtherOptions(testCaseWhenAllCFMatchAndAllDefault).build();
-        }  else {
-            schemaBuilder = new PhoenixTestBuilder.SchemaBuilder(getUrl());
-            schemaBuilder.withTableOptions(tableOptions).build();
-        }
-
-        PTable table = schemaBuilder.getBaseTable();
-        String schemaName = table.getSchemaName().getString();
-        String tableName = table.getTableName().getString();
-        String newBaseTableName = "NEW_TBL_" + tableName;
-        String fullNewBaseTableName = SchemaUtil.getTableName(schemaName, newBaseTableName);
-        String fullTableName = table.getName().getString();
-
-        try (Connection conn = getConnection(props)) {
-
-            try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
-                String snapshotName = new StringBuilder(fullTableName).append("-Snapshot").toString();
-                admin.snapshot(snapshotName, TableName.valueOf(fullTableName));
-                admin.cloneSnapshot(Bytes.toBytes(snapshotName), Bytes.toBytes(fullNewBaseTableName));
-            }
-
-            renameAndDropPhysicalTable(conn, null, schemaName, tableName, newBaseTableName);
-        }
-
-        // TODO: this still creates a new table.
-        if (createChildAfterTransform) {
-            schemaBuilder = new PhoenixTestBuilder.SchemaBuilder(getUrl());
-            schemaBuilder.withDataOptions(schemaBuilder.getDataOptions())
-                    .withTableOptions(tableOptions)
-                    .withGlobalViewOptions(globalViewOptions)
-                    .withGlobalViewIndexOptions(globalViewIndexOptions)
-                    .withTenantViewOptions(tenantViewOptions)
-                    .withOtherOptions(testCaseWhenAllCFMatchAndAllDefault).build();
-        }
-
-        // Define the test data.
-        PhoenixTestBuilder.DataSupplier dataSupplier = new PhoenixTestBuilder.DataSupplier() {
-
-            @Override public List<Object> getValues(int rowIndex) {
-                Random rnd = new Random();
-                String id = String.format(ViewTTLIT.ID_FMT, rowIndex);
-                String zid = String.format(ViewTTLIT.ZID_FMT, rowIndex);
-                String col4 = String.format(ViewTTLIT.COL4_FMT, rowIndex + rnd.nextInt(MAX_ROWS));
-                String col5 = String.format(ViewTTLIT.COL5_FMT, rowIndex + rnd.nextInt(MAX_ROWS));
-                String col6 = String.format(ViewTTLIT.COL6_FMT, rowIndex + rnd.nextInt(MAX_ROWS));
-                String col7 = String.format(ViewTTLIT.COL7_FMT, rowIndex + rnd.nextInt(MAX_ROWS));
-                String col8 = String.format(ViewTTLIT.COL8_FMT, rowIndex + rnd.nextInt(MAX_ROWS));
-                String col9 = String.format(ViewTTLIT.COL9_FMT, rowIndex + rnd.nextInt(MAX_ROWS));
-
-                return Lists.newArrayList(
-                        new Object[] { id, zid, col4, col5, col6, col7, col8, col9 });
-            }
-        };
-
-        // Create a test data reader/writer for the above schema.
-        PhoenixTestBuilder.DataWriter dataWriter = new PhoenixTestBuilder.BasicDataWriter();
-        List<String> columns =
-                Lists.newArrayList("ID", "ZID", "COL4", "COL5", "COL6", "COL7", "COL8", "COL9");
-        List<String> rowKeyColumns = Lists.newArrayList("ID", "ZID");
-
-        String tenantConnectUrl =
-                getUrl() + ';' + TENANT_ID_ATTRIB + '=' + schemaBuilder.getDataOptions().getTenantId();
-
-        try (Connection tenantConnection = DriverManager.getConnection(tenantConnectUrl)) {
-            tenantConnection.setAutoCommit(true);
-            dataWriter.setConnection(tenantConnection);
-            dataWriter.setDataSupplier(dataSupplier);
-            dataWriter.setUpsertColumns(columns);
-            dataWriter.setRowKeyColumns(rowKeyColumns);
-            dataWriter.setTargetEntity(schemaBuilder.getEntityTenantViewName());
-            dataWriter.upsertRows(1, numOfRows);
-            com.google.common.collect.Table<String, String, Object> upsertedData = dataWriter.getDataTable();;
-
-            PhoenixTestBuilder.DataReader dataReader = new PhoenixTestBuilder.BasicDataReader();
-            dataReader.setValidationColumns(columns);
-            dataReader.setRowKeyColumns(rowKeyColumns);
-            dataReader.setDML(String.format("SELECT %s from %s", Joiner.on(",").join(columns),
-                    schemaBuilder.getEntityTenantViewName()));
-            dataReader.setTargetEntity(schemaBuilder.getEntityTenantViewName());
-            dataReader.setConnection(tenantConnection);
-            dataReader.readRows();
-            com.google.common.collect.Table<String, String, Object> fetchedData
-                    = dataReader.getDataTable();
-            assertNotNull("Fetched data should not be null", fetchedData);
-            ViewTTLIT.verifyRowsBeforeTTLExpiration(upsertedData, fetchedData);
-
-        }
-        return schemaBuilder;
-    }
-
-    @Test
-    public void testWith2LevelViewsBaseTablePhysicalNameChange() throws Exception {
-        // TODO: use namespace in one of the cases
-        PhoenixTestBuilder.SchemaBuilder schemaBuilder = createGlobalViewAndTenantView();
-    }
-
     @Test
     public void testUpdatePhysicalTableNameWithViews() throws Exception {
         try (Connection conn = getConnection(props)) {
@@ -587,7 +266,8 @@ public class LogicalTableNameIT extends BaseTest {
                 String fullView2Name = SchemaUtil.getTableName(schemaName, view2Name);
                 String fullView2IndexName1 =  SchemaUtil.getTableName(schemaName, view2IndexName1);
 
-                HashMap<String, ArrayList<String>> expected = testWithViewsAndIndex_BaseTableChange(conn, conn2, schemaName, tableName, view1Name, view1IndexName1, view1IndexName2, view2Name, view2IndexName1);
+                HashMap<String, ArrayList<String>> expected = testWithViewsAndIndex_BaseTableChange(conn, conn2, null,
+                        schemaName, tableName, view1Name, view1IndexName1, view1IndexName2, view2Name, view2IndexName1, false, createChildAfterRename);
 
                 // We have to rebuild index for this to work
                 IndexToolIT.runIndexTool(true, false, schemaName, view1Name, view1IndexName1);
@@ -651,8 +331,8 @@ public class LogicalTableNameIT extends BaseTest {
                 String view2Name = "VW2_" + generateUniqueName();
                 String view2IndexName1 = "VW2IDX1_" + generateUniqueName();
 
-                testWithViewsAndIndex_BaseTableChange(conn, conn2,schemaName, tableName, view1Name,
-                        view1IndexName1, view1IndexName2, view2Name, view2IndexName1);
+                testWithViewsAndIndex_BaseTableChange(conn, conn2, null,schemaName, tableName, view1Name,
+                        view1IndexName1, view1IndexName2, view2Name, view2IndexName1, false, createChildAfterRename);
 
                 List<Job>
                         completedJobs =
@@ -663,7 +343,7 @@ public class LogicalTableNameIT extends BaseTest {
                 assertTrue(job.isSuccessful());
 
                 Counters counters = job.getCounters();
-                if (createChildAfterTransform) {
+                if (createChildAfterRename) {
                     assertEquals(3, counters.findCounter(VALID_ROW_COUNT).getValue());
                     assertEquals(2, counters.findCounter(INVALID_ROW_COUNT).getValue());
                 } else {
@@ -676,139 +356,89 @@ public class LogicalTableNameIT extends BaseTest {
         }
     }
 
-    private void createTable(Connection conn, String tableName) throws Exception {
-        String createTableSql = "CREATE TABLE " + tableName + " (PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 INTEGER, V3 INTEGER "
-                + "CONSTRAINT NAME_PK PRIMARY KEY(PK1)) COLUMN_ENCODED_BYTES=0 " + dataTableDdl;
-        LOGGER.debug(createTableSql);
-        conn.createStatement().execute(createTableSql);
-    }
-
-    private void createIndexOnTable(Connection conn, String tableName, String indexName)
-            throws SQLException {
-        String createIndexSql = "CREATE INDEX " + indexName + " ON " + tableName + " (V1) INCLUDE (V2, V3) ";
-        LOGGER.debug(createIndexSql);
-        conn.createStatement().execute(createIndexSql);
-    }
-
-    private void dropIndex(Connection conn, String tableName, String indexName)
-            throws SQLException {
-        String sql = "DROP INDEX " + indexName + " ON " + tableName ;
-        conn.createStatement().execute(sql);
-    }
+    @Test
+    public void testWith2LevelViewsBaseTablePhysicalNameChange() throws Exception {
+        String schemaName = "S_" + generateUniqueName();
+        String tableName = "TBL_" + generateUniqueName();
+        String view1Name = "VW1_" + generateUniqueName();
+        String level2ViewName = "VW1_CH1_" + generateUniqueName();
+        String fullLevel2ViewName = SchemaUtil.getTableName(schemaName, level2ViewName);
+        String view1IndexName1 = "VW1IDX1_" + generateUniqueName();
+        String level2ViewIndexName = "VW1_CH1IDX_" + generateUniqueName();
+        String fullView1Name = SchemaUtil.getTableName(schemaName, view1Name);
+        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
+        try (Connection conn = getConnection(props)) {
+            try (Connection conn2 = getConnection(props)) {
+                conn.setAutoCommit(true);
+                conn2.setAutoCommit(true);
+                HashMap<String, ArrayList<String>> expected = new HashMap<>();
+                createTable(conn, fullTableName);
+                createViewAndIndex(conn2, schemaName, tableName, view1Name, view1IndexName1);
+                createViewAndIndex(conn2, schemaName, tableName, view1Name, view1IndexName1);
+                expected.putAll(populateView(conn, fullView1Name, 1, 2));
+
+                String ddl = "CREATE VIEW " + fullLevel2ViewName + "(chv2 VARCHAR) AS SELECT * FROM " + fullView1Name;
+                String
+                        indexDdl =
+                        "CREATE INDEX " + level2ViewIndexName + " ON " + fullLevel2ViewName + " (chv2) INCLUDE (v1, VIEW_COL1)";
+                if (!createChildAfterRename) {
+                    conn.createStatement().execute(ddl);
+                    conn.createStatement().execute(indexDdl);
+                }
 
-    private HashMap<String, ArrayList<String>> populateTable(Connection conn, String tableName, int startnum, int numOfRows)
-            throws SQLException {
-        String upsert = "UPSERT INTO " + tableName + " (PK1, V1,  V2, V3) VALUES (?,?,?,?)";
-        PreparedStatement upsertStmt = conn.prepareStatement(upsert);
-        HashMap<String, ArrayList<String>> result = new HashMap<>();
-        for (int i=startnum; i < startnum + numOfRows; i++) {
-            ArrayList<String> row = new ArrayList<>();
-            upsertStmt.setString(1, "PK" + i);
-            row.add("PK" + i);
-            upsertStmt.setString(2, "V1" + i);
-            row.add("V1" + i);
-            upsertStmt.setInt(3, i);
-            row.add(String.valueOf(i));
-            upsertStmt.setInt(4, i + 1);
-            row.add(String.valueOf(i + 1));
-            upsertStmt.executeUpdate();
-            result.put("PK" + i, row);
-        }
-        return result;
-    }
+                String newTableName = NEW_TABLE_PREFIX + generateUniqueName();
+                String fullTableHbaseName = SchemaUtil.getTableName(schemaName, tableName);
+                createAndPointToNewPhysicalTable(conn, fullTableHbaseName, newTableName, false);
 
-    private HashMap<String, ArrayList<String>> populateView(Connection conn, String viewName, int startNum, int numOfRows) throws SQLException {
-        String upsert = "UPSERT INTO " + viewName + " (PK1, V1,  V2, V3, VIEW_COL1, VIEW_COL2) VALUES (?,?,?,?,?,?)";
-        PreparedStatement upsertStmt = conn.prepareStatement(upsert);
-        HashMap<String, ArrayList<String>> result = new HashMap<>();
-        for (int i=startNum; i < startNum + numOfRows; i++) {
-            ArrayList<String> row = new ArrayList<>();
-            upsertStmt.setString(1, "PK"+i);
-            row.add("PK"+i);
-            upsertStmt.setString(2, "V1"+i);
-            row.add("V1"+i);
-            upsertStmt.setInt(3, i);
-            row.add(String.valueOf(i));
-            upsertStmt.setInt(4, i+1);
-            row.add(String.valueOf(i+1));
-            upsertStmt.setString(5, "VIEW_COL1_"+i);
-            row.add("VIEW_COL1_"+i);
-            upsertStmt.setString(6, "VIEW_COL2_"+i);
-            row.add("VIEW_COL2_"+i);
-            upsertStmt.executeUpdate();
-            result.put("PK"+i, row);
-        }
-        return result;
-    }
+                conn.unwrap(PhoenixConnection.class).getQueryServices().clearCache();
+                if (createChildAfterRename) {
+                    conn.createStatement().execute(ddl);
+                    conn.createStatement().execute(indexDdl);
+                }
 
-    private void createViewAndIndex(Connection conn, String schemaName, String tableName, String viewName, String viewIndexName)
-            throws SQLException {
-        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
-        String fullViewName = SchemaUtil.getTableName(schemaName, viewName);
-        String
-                view1DDL =
-                "CREATE VIEW IF NOT EXISTS " + fullViewName + " ( VIEW_COL1 
VARCHAR, VIEW_COL2 VARCHAR) AS SELECT * FROM "
-                        + fullTableName;
-        conn.createStatement().execute(view1DDL);
-        String indexDDL = "CREATE INDEX IF NOT EXISTS " + viewIndexName + " ON " + fullViewName + " (V1) include (V2, V3, VIEW_COL2) ";
-        conn.createStatement().execute(indexDDL);
-        conn.commit();
-    }
+                // Add new row to child view
+                String upsert = "UPSERT INTO " + fullLevel2ViewName + " (PK1, V1, VIEW_COL1, CHV2) VALUES (?,?,?,?)";
+                PreparedStatement upsertStmt = conn.prepareStatement(upsert);
+                ArrayList<String> row = new ArrayList<>();
+                upsertStmt.setString(1, "PK10");
+                upsertStmt.setString(2, "V10");
+                upsertStmt.setString(3, "VIEW_COL1_10");
+                upsertStmt.setString(4, "CHV210");
+                upsertStmt.executeUpdate();
+
+                String selectFromL2View = "SELECT /*+ NO_INDEX */ * FROM " + fullLevel2ViewName + " WHERE chv2='CHV210'";
+                ResultSet
+                        rs =
+                        conn2.createStatement().executeQuery(selectFromL2View);
+                assertEquals(true, rs.next());
+                assertEquals(false, rs.next());
 
-    private void validateTable(Connection connection, String tableName) throws SQLException {
-        String selectTable = "SELECT PK1, V1, V2, V3 FROM " + tableName + " ORDER BY PK1 DESC";
-        ResultSet rs = connection.createStatement().executeQuery(selectTable);
-        assertTrue(rs.next());
-        assertEquals("PK3", rs.getString(1));
-        assertEquals("V13", rs.getString(2));
-        assertEquals(3, rs.getInt(3));
-        assertEquals(4, rs.getInt(4));
-        assertTrue(rs.next());
-        assertEquals("PK2", rs.getString(1));
-        assertEquals("V12", rs.getString(2));
-        assertEquals(2, rs.getInt(3));
-        assertEquals(3, rs.getInt(4));
-        assertTrue(rs.next());
-        assertEquals("PK1", rs.getString(1));
-        assertEquals("V11", rs.getString(2));
-        assertEquals(1, rs.getInt(3));
-        assertEquals(2, rs.getInt(4));
-    }
+                String indexSelect = "SELECT chv2, V1, VIEW_COL1 FROM " + fullLevel2ViewName + " WHERE chv2='CHV210'";
+                rs =
+                        conn2.createStatement().executeQuery("EXPLAIN " + indexSelect);
+                assertEquals(true, QueryUtil.getExplainPlan(rs).contains(VIEW_INDEX_TABLE_PREFIX));
+                rs = conn2.createStatement().executeQuery(indexSelect);
+                assertEquals(true, rs.next());
+                assertEquals(false, rs.next());
 
-    private void validateIndex(Connection connection, String tableName, boolean isViewIndex, HashMap<String, ArrayList<String>> expected) throws SQLException {
-        String selectTable = "SELECT * FROM " + tableName;
-        ResultSet rs = connection.createStatement().executeQuery(selectTable);
-        int cnt = 0;
-        while (rs.next()) {
-            String pk = rs.getString(2);
-            assertTrue(expected.containsKey(pk));
-            ArrayList<String> row = expected.get(pk);
-            assertEquals(row.get(1), rs.getString(1));
-            assertEquals(row.get(2), rs.getString(3));
-            assertEquals(row.get(3), rs.getString(4));
-            if (isViewIndex) {
-                assertEquals(row.get(5), rs.getString(5));
+                // Drop row and check
+                conn2.createStatement().execute("DELETE FROM " + 
fullLevel2ViewName + " WHERE chv2='CHV210'");
+                rs = conn2.createStatement().executeQuery(indexSelect);
+                assertEquals(false, rs.next());
             }
-            cnt++;
         }
-        assertEquals(cnt, expected.size());
     }
 
-    public static void renameAndDropPhysicalTable(Connection conn, String tenantId, String schema, String tableName, String physicalName) throws Exception {
-        String
-                changeName =
-                String.format(
-                        "UPSERT INTO SYSTEM.CATALOG (TENANT_ID, TABLE_SCHEM, 
TABLE_NAME, COLUMN_NAME, COLUMN_FAMILY, PHYSICAL_TABLE_NAME) VALUES (%s, '%s', 
'%s', NULL, NULL, '%s')",
-                        tenantId, schema, tableName, physicalName);
-        conn.createStatement().execute(changeName);
-        conn.commit();
-
-        String fullTableName = SchemaUtil.getTableName(schema, tableName);
-        Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
-        TableName hTableName = TableName.valueOf(fullTableName);
-        admin.disableTable(hTableName);
-        admin.deleteTable(hTableName);
-        conn.unwrap(PhoenixConnection.class).getQueryServices()
-                .clearCache();
+    @Test
+    public void testHashJoin() throws Exception {
+        if (immutable || createChildAfterRename) {
+            return;
+        }
+        Object[] arr = HashJoinGlobalIndexIT.data().toArray();
+        String[] indexDDL = ((String[][])arr[0])[0];
+        String[] plans = ((String[][])arr[0])[1];
+        HashJoinGlobalIndexIT hjgit = new HashJoinGlobalIndexIT(indexDDL, plans);
+        hjgit.createSchema();
+        hjgit.testInnerJoin(false);
     }
 }
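[Editorial sketch, not part of the patch] The physical-table rename that all of these ITs exercise is just a SYSTEM.CATALOG header-row update followed by dropping the old HBase table, exactly as the removed renameAndDropPhysicalTable helper above shows. A minimal, hedged JDBC sketch of that flow, assuming a schema S, a logical table MYTABLE, an already-cloned HBase table NEW_TBL_MYTABLE, and a local quorum (all placeholder names, not the committed helper):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.phoenix.jdbc.PhoenixConnection;

    public class RenamePhysicalTableSketch {
        public static void main(String[] args) throws Exception {
            // Sketch only: repoint logical table S.MYTABLE at the pre-cloned HBase
            // table NEW_TBL_MYTABLE, then drop the old physical table.
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
                conn.createStatement().execute(
                        "UPSERT INTO SYSTEM.CATALOG (TENANT_ID, TABLE_SCHEM, TABLE_NAME, "
                        + "COLUMN_NAME, COLUMN_FAMILY, PHYSICAL_TABLE_NAME) "
                        + "VALUES (NULL, 'S', 'MYTABLE', NULL, NULL, 'NEW_TBL_MYTABLE')");
                conn.commit();
                try (Admin admin = conn.unwrap(PhoenixConnection.class)
                        .getQueryServices().getAdmin()) {
                    admin.disableTable(TableName.valueOf("S.MYTABLE"));
                    admin.deleteTable(TableName.valueOf("S.MYTABLE"));
                }
                // Force the client to re-resolve metadata after the catalog change.
                conn.unwrap(PhoenixConnection.class).getQueryServices().clearCache();
            }
        }
    }

In the committed tests this flow appears to be bundled behind LogicalTableNameBaseIT.createAndPointToNewPhysicalTable(...), which the callers above invoke after (or instead of) cloning a snapshot of the original table themselves.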
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/join/HashJoinIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/join/HashJoinIT.java
index 56ec7f2..693c400 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/join/HashJoinIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/join/HashJoinIT.java
@@ -37,10 +37,12 @@ import java.sql.Timestamp;
 import java.util.Properties;
 
 import org.apache.phoenix.cache.ServerCacheClient;
+import org.apache.phoenix.end2end.LogicalTableNameBaseIT;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.QueryUtil;
+import org.apache.phoenix.util.SchemaUtil;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -52,14 +54,18 @@ public abstract class HashJoinIT extends BaseJoinIT {
     public HashJoinIT(String[] indexDDL, String[] plans) {
         super(indexDDL, plans);
     }
-    
-    
-    @Test
-    public void testDefaultJoin() throws Exception {
+
+    public void testInnerJoin(boolean renamePhysicalTable) throws Exception {
         Connection conn = getConnection();
         String tableName1 = getTableName(conn, JOIN_ITEM_TABLE_FULL_NAME);
         String tableName2 = getTableName(conn, JOIN_SUPPLIER_TABLE_FULL_NAME);
-        String query = "SELECT item.\"item_id\", item.name, supp.\"supplier_id\", supp.name FROM " + tableName1 + " item JOIN " + tableName2 + " supp ON item.\"supplier_id\" = supp.\"supplier_id\"";
+        String fullNameRealItemTable =getTableNameMap().get(JOIN_ITEM_TABLE_FULL_NAME);
+        String fullNameSupplierTable =getTableNameMap().get(JOIN_SUPPLIER_TABLE_FULL_NAME);
+        if (renamePhysicalTable) {
+            LogicalTableNameBaseIT.createAndPointToNewPhysicalTable(conn, fullNameRealItemTable, false);
+            LogicalTableNameBaseIT.createAndPointToNewPhysicalTable(conn, fullNameSupplierTable, false);
+        }
+        String query = "SELECT item.\"item_id\", item.name, supp.\"supplier_id\", supp.name, next value for " + seqName + " FROM " + tableName1 + " item INNER JOIN " + tableName2 + " supp ON item.\"supplier_id\" = supp.\"supplier_id\"";
         try {
             PreparedStatement statement = conn.prepareStatement(query);
             ResultSet rs = statement.executeQuery();
@@ -68,44 +74,50 @@ public abstract class HashJoinIT extends BaseJoinIT {
             assertEquals(rs.getString(2), "T1");
             assertEquals(rs.getString(3), "0000000001");
             assertEquals(rs.getString(4), "S1");
+            assertEquals(1, rs.getInt(5));
             assertTrue (rs.next());
             assertEquals(rs.getString(1), "0000000002");
             assertEquals(rs.getString(2), "T2");
             assertEquals(rs.getString(3), "0000000001");
             assertEquals(rs.getString(4), "S1");
+            assertEquals(2, rs.getInt(5));
             assertTrue (rs.next());
             assertEquals(rs.getString(1), "0000000003");
             assertEquals(rs.getString(2), "T3");
             assertEquals(rs.getString(3), "0000000002");
             assertEquals(rs.getString(4), "S2");
+            assertEquals(3, rs.getInt(5));
             assertTrue (rs.next());
             assertEquals(rs.getString(1), "0000000004");
             assertEquals(rs.getString(2), "T4");
             assertEquals(rs.getString(3), "0000000002");
             assertEquals(rs.getString(4), "S2");
+            assertEquals(4, rs.getInt(5));
             assertTrue (rs.next());
             assertEquals(rs.getString(1), "0000000005");
             assertEquals(rs.getString(2), "T5");
             assertEquals(rs.getString(3), "0000000005");
             assertEquals(rs.getString(4), "S5");
+            assertEquals(5, rs.getInt(5));
             assertTrue (rs.next());
             assertEquals(rs.getString(1), "0000000006");
             assertEquals(rs.getString(2), "T6");
             assertEquals(rs.getString(3), "0000000006");
             assertEquals(rs.getString(4), "S6");
+            assertEquals(6, rs.getInt(5));
 
             assertFalse(rs.next());
         } finally {
             conn.close();
         }
     }
-
+    
     @Test
-    public void testInnerJoin() throws Exception {
+    public void testDefaultJoin() throws Exception {
         Connection conn = getConnection();
         String tableName1 = getTableName(conn, JOIN_ITEM_TABLE_FULL_NAME);
         String tableName2 = getTableName(conn, JOIN_SUPPLIER_TABLE_FULL_NAME);
-        String query = "SELECT item.\"item_id\", item.name, supp.\"supplier_id\", supp.name, next value for " + seqName + " FROM " + tableName1 + " item INNER JOIN " + tableName2 + " supp ON item.\"supplier_id\" = supp.\"supplier_id\"";
+        String query = "SELECT item.\"item_id\", item.name, supp.\"supplier_id\", supp.name FROM " + tableName1 + " item JOIN " + tableName2 + " supp ON item.\"supplier_id\" = supp.\"supplier_id\"";
         try {
             PreparedStatement statement = conn.prepareStatement(query);
             ResultSet rs = statement.executeQuery();
@@ -114,43 +126,42 @@ public abstract class HashJoinIT extends BaseJoinIT {
             assertEquals(rs.getString(2), "T1");
             assertEquals(rs.getString(3), "0000000001");
             assertEquals(rs.getString(4), "S1");
-            assertEquals(1, rs.getInt(5));
             assertTrue (rs.next());
             assertEquals(rs.getString(1), "0000000002");
             assertEquals(rs.getString(2), "T2");
             assertEquals(rs.getString(3), "0000000001");
             assertEquals(rs.getString(4), "S1");
-            assertEquals(2, rs.getInt(5));
             assertTrue (rs.next());
             assertEquals(rs.getString(1), "0000000003");
             assertEquals(rs.getString(2), "T3");
             assertEquals(rs.getString(3), "0000000002");
             assertEquals(rs.getString(4), "S2");
-            assertEquals(3, rs.getInt(5));
             assertTrue (rs.next());
             assertEquals(rs.getString(1), "0000000004");
             assertEquals(rs.getString(2), "T4");
             assertEquals(rs.getString(3), "0000000002");
             assertEquals(rs.getString(4), "S2");
-            assertEquals(4, rs.getInt(5));
             assertTrue (rs.next());
             assertEquals(rs.getString(1), "0000000005");
             assertEquals(rs.getString(2), "T5");
             assertEquals(rs.getString(3), "0000000005");
             assertEquals(rs.getString(4), "S5");
-            assertEquals(5, rs.getInt(5));
             assertTrue (rs.next());
             assertEquals(rs.getString(1), "0000000006");
             assertEquals(rs.getString(2), "T6");
             assertEquals(rs.getString(3), "0000000006");
             assertEquals(rs.getString(4), "S6");
-            assertEquals(6, rs.getInt(5));
 
             assertFalse(rs.next());
         } finally {
             conn.close();
         }
     }
+
+    @Test
+    public void testInnerJoin() throws Exception {
+       testInnerJoin(false);
+    }
             
     @Test
     public void testLeftJoin() throws Exception {
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index 42bc4d0..f690c4b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -132,7 +132,9 @@ import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
 import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.ipc.RpcCall;
 import org.apache.hadoop.hbase.ipc.RpcUtil;
@@ -984,6 +986,25 @@ TABLE_FAMILY_BYTES, TABLE_SEQ_NUM_BYTES);
         return table;
     }
 
+    private PName getPhysicalTableName(Region region, byte[] tenantId, byte[] schema, byte[] table, long timestamp) throws IOException {
+        byte[] key = SchemaUtil.getTableKey(tenantId, schema, table);
+        Scan scan = MetaDataUtil.newTableRowsScan(key, MetaDataProtocol.MIN_TABLE_TIMESTAMP,
+                timestamp);
+        scan.addColumn(TABLE_FAMILY_BYTES, PHYSICAL_TABLE_NAME_BYTES);
+        try (RegionScanner scanner = region.getScanner(scan)) {
+            List<Cell> results = Lists.newArrayList();
+            scanner.next(results);
+            Cell physicalTableNameKv = null;
+            if (results.size() > 0) {
+                physicalTableNameKv = results.get(0);
+            }
+            PName physicalTableName =
+                    physicalTableNameKv != null ? newPName(physicalTableNameKv.getValueArray(),
+                            physicalTableNameKv.getValueOffset(), physicalTableNameKv.getValueLength()) : null;
+            return physicalTableName;
+        }
+    }
+
     private PTable getTable(RegionScanner scanner, long clientTimeStamp, long tableTimeStamp,
                             int clientVersion)
             throws IOException, SQLException {
@@ -1258,7 +1279,17 @@ TABLE_FAMILY_BYTES, TABLE_SEQ_NUM_BYTES);
                     }
 
                     if (parentTable == null) {
-                        physicalTables.add(famName);
+                        if (indexType == IndexType.LOCAL) {
+                            PName tablePhysicalName = getPhysicalTableName(env.getRegion(),null, SchemaUtil.getSchemaNameFromFullName(famName.getBytes()).getBytes(),
+                                    SchemaUtil.getTableNameFromFullName(famName.getBytes()).getBytes(), clientTimeStamp);
+                            if (tablePhysicalName == null) {
+                                physicalTables.add(famName);
+                            } else {
+                                physicalTables.add(SchemaUtil.getPhysicalHBaseTableName(schemaName, tablePhysicalName, isNamespaceMapped));
+                            }
+                        } else {
+                            physicalTables.add(famName);
+                        }
                         // If this is a view index, then one of the link is IDX_VW -> _IDX_ PhysicalTable link. Since famName is _IDX_ and we can't get this table hence it is null, we need to use actual view name
                         parentLogicalName = (tableType == INDEX ? SchemaUtil.getTableName(parentSchemaName, parentTableName) : famName);
                     } else {
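[Editorial sketch, not part of the patch] The MetaDataEndpointImpl change above makes the coprocessor follow the PHYSICAL_TABLE_NAME link when rebuilding metadata for a LOCAL index whose base table has been repointed. The same linkage can, presumably, also be inspected from a client with an ordinary query against the SYSTEM.CATALOG header row; a rough sketch, with 'S' and 'MYTABLE' as placeholder names and assuming PHYSICAL_TABLE_NAME has been populated as in the tests above:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;

    public class ShowPhysicalTableNameSketch {
        public static void main(String[] args) throws Exception {
            // Sketch only: read the PHYSICAL_TABLE_NAME stored on the table's header
            // row (the row where COLUMN_NAME and COLUMN_FAMILY are NULL).
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
                 ResultSet rs = conn.createStatement().executeQuery(
                         "SELECT PHYSICAL_TABLE_NAME FROM SYSTEM.CATALOG "
                         + "WHERE TABLE_SCHEM = 'S' AND TABLE_NAME = 'MYTABLE' "
                         + "AND COLUMN_NAME IS NULL AND COLUMN_FAMILY IS NULL "
                         + "AND PHYSICAL_TABLE_NAME IS NOT NULL")) {
                while (rs.next()) {
                    System.out.println("physical table: " + rs.getString(1));
                }
            }
        }
    }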
