Branch rebased to 08d9c7154194de69ce688ad78357bffb8d34c92f of 4.x-HBase-0.98


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/23a87989
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/23a87989
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/23a87989

Branch: refs/heads/encodecolumns2
Commit: 23a879890e1e00a9bf6ab4f239b59e50b8762909
Parents: 153612a
Author: Samarth <samarth.j...@salesforce.com>
Authored: Wed Oct 5 00:11:07 2016 -0700
Committer: Samarth <samarth.j...@salesforce.com>
Committed: Fri Oct 7 13:06:36 2016 -0700

----------------------------------------------------------------------
 .../apache/phoenix/end2end/AlterTableIT.java    |  250 +++-
 .../phoenix/end2end/AlterTableWithViewsIT.java  |    4 +-
 .../apache/phoenix/end2end/CreateTableIT.java   |   22 +
 .../phoenix/end2end/PhoenixRuntimeIT.java       |    4 +-
 .../phoenix/end2end/RowValueConstructorIT.java  |    2 +-
 .../phoenix/end2end/StatsCollectorIT.java       |    8 +-
 .../apache/phoenix/end2end/UpsertValuesIT.java  |   45 +-
 .../phoenix/end2end/index/DropMetadataIT.java   |   13 +-
 .../end2end/index/IndexExpressionIT.java        |   21 +-
 .../apache/phoenix/end2end/index/IndexIT.java   |   26 +-
 .../phoenix/end2end/index/IndexTestUtil.java    |   13 +-
 .../phoenix/compile/CreateTableCompiler.java    |    2 +-
 .../phoenix/compile/ExpressionCompiler.java     |   18 +-
 .../apache/phoenix/compile/FromCompiler.java    |   54 +-
 .../apache/phoenix/compile/JoinCompiler.java    |    8 +-
 .../phoenix/compile/ListJarsQueryPlan.java      |    3 +-
 .../apache/phoenix/compile/PostDDLCompiler.java |   11 +-
 .../phoenix/compile/ProjectionCompiler.java     |   10 +-
 .../apache/phoenix/compile/QueryCompiler.java   |    2 +-
 .../apache/phoenix/compile/TraceQueryPlan.java  |    2 +-
 .../compile/TupleProjectionCompiler.java        |   22 +-
 .../apache/phoenix/compile/UnionCompiler.java   |    6 +-
 .../apache/phoenix/compile/WhereCompiler.java   |   36 +-
 .../coprocessor/BaseScannerRegionObserver.java  |   42 +-
 .../coprocessor/DelegateRegionScanner.java      |    5 +
 .../GroupedAggregateRegionObserver.java         |   36 +-
 .../coprocessor/HashJoinRegionScanner.java      |    2 +-
 .../coprocessor/MetaDataEndpointImpl.java       |  171 ++-
 .../phoenix/coprocessor/ScanRegionObserver.java |   12 +-
 .../coprocessor/SequenceRegionObserver.java     |    1 +
 .../UngroupedAggregateRegionObserver.java       |   28 +-
 .../coprocessor/generated/PTableProtos.java     | 1379 ++++++++++++++++--
 .../apache/phoenix/execute/BaseQueryPlan.java   |   25 +-
 .../apache/phoenix/execute/MutationState.java   |   12 +-
 .../phoenix/execute/SortMergeJoinPlan.java      |    1 +
 .../apache/phoenix/execute/TupleProjector.java  |    6 +-
 .../expression/ArrayColumnExpression.java       |  110 ++
 .../expression/ArrayConstructorExpression.java  |    2 +-
 .../phoenix/expression/ExpressionType.java      |    3 +-
 .../expression/KeyValueColumnExpression.java    |   16 +-
 .../phoenix/expression/LiteralExpression.java   |   11 +-
 .../expression/ProjectedColumnExpression.java   |    1 +
 .../visitor/CloneExpressionVisitor.java         |    6 +
 .../expression/visitor/ExpressionVisitor.java   |    2 +
 ...lumnWithKeyValueColumnExpressionVisitor.java |   37 +
 .../StatelessTraverseAllExpressionVisitor.java  |    7 +-
 .../StatelessTraverseNoExpressionVisitor.java   |    7 +-
 .../phoenix/filter/ColumnProjectionFilter.java  |   24 +-
 .../filter/MultiKeyValueComparisonFilter.java   |   10 +-
 .../SingleCQKeyValueComparisonFilter.java       |    3 +-
 .../filter/SingleKeyValueComparisonFilter.java  |   10 +-
 .../apache/phoenix/hbase/index/ValueGetter.java |    1 +
 .../example/CoveredColumnIndexCodec.java        |    1 -
 .../apache/phoenix/index/IndexMaintainer.java   |  307 +++-
 .../apache/phoenix/index/PhoenixIndexCodec.java |    2 +-
 .../index/PhoenixTransactionalIndexer.java      |   16 +-
 .../phoenix/iterate/BaseResultIterators.java    |   83 +-
 .../iterate/LookAheadResultIterator.java        |    2 +-
 .../phoenix/iterate/MappedByteBufferQueue.java  |    1 +
 .../phoenix/iterate/OrderedResultIterator.java  |    3 +-
 .../iterate/RegionScannerResultIterator.java    |   14 +-
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java   |   12 +-
 .../apache/phoenix/jdbc/PhoenixResultSet.java   |    2 +-
 .../apache/phoenix/join/HashCacheFactory.java   |    1 +
 .../mapreduce/FormatToBytesWritableMapper.java  |   22 +-
 .../mapreduce/FormatToKeyValueReducer.java      |   30 +-
 .../query/ConnectionQueryServicesImpl.java      |    2 +-
 .../query/ConnectionlessQueryServicesImpl.java  |    1 -
 .../apache/phoenix/query/QueryConstants.java    |   57 +-
 .../phoenix/query/QueryServicesOptions.java     |    1 -
 .../org/apache/phoenix/schema/ColumnRef.java    |   10 +-
 .../apache/phoenix/schema/DelegateColumn.java   |    5 +
 .../apache/phoenix/schema/DelegateTable.java    |   20 +-
 .../apache/phoenix/schema/KeyValueSchema.java   |    2 +
 .../apache/phoenix/schema/MetaDataClient.java   |  339 ++++-
 .../java/org/apache/phoenix/schema/PColumn.java |    9 +-
 .../apache/phoenix/schema/PColumnFamily.java    |   14 +-
 .../phoenix/schema/PColumnFamilyImpl.java       |   50 +-
 .../org/apache/phoenix/schema/PColumnImpl.java  |   24 +-
 .../apache/phoenix/schema/PMetaDataImpl.java    |    4 +-
 .../java/org/apache/phoenix/schema/PName.java   |   26 +
 .../java/org/apache/phoenix/schema/PTable.java  |  124 +-
 .../org/apache/phoenix/schema/PTableImpl.java   |  434 ++++--
 .../org/apache/phoenix/schema/PTableKey.java    |    6 +-
 .../apache/phoenix/schema/ProjectedColumn.java  |    1 +
 .../org/apache/phoenix/schema/SaltingUtil.java  |    2 +-
 .../apache/phoenix/schema/TableProperty.java    |    2 +-
 .../apache/phoenix/schema/tuple/BaseTuple.java  |   39 +
 .../phoenix/schema/tuple/DelegateTuple.java     |    7 +
 .../tuple/EncodedColumnQualiferCellsList.java   |  553 +++++++
 .../schema/tuple/MultiKeyValueTuple.java        |    1 +
 .../tuple/PositionBasedMultiKeyValueTuple.java  |   88 ++
 .../schema/tuple/PositionBasedResultTuple.java  |   83 ++
 .../phoenix/schema/tuple/ResultTuple.java       |   35 +-
 .../org/apache/phoenix/schema/tuple/Tuple.java  |    4 +
 .../tuple/UnboundedSkipNullCellsList.java       |  488 +++++++
 .../apache/phoenix/util/EncodedColumnsUtil.java |   94 ++
 .../java/org/apache/phoenix/util/IndexUtil.java |  268 ++--
 .../org/apache/phoenix/util/PhoenixRuntime.java |    8 +-
 .../org/apache/phoenix/util/ResultUtil.java     |   60 -
 .../java/org/apache/phoenix/util/ScanUtil.java  |   38 +
 .../org/apache/phoenix/util/SchemaUtil.java     |   28 +-
 .../phoenix/compile/HavingCompilerTest.java     |    2 +-
 .../phoenix/compile/QueryCompilerTest.java      |    2 +-
 .../phoenix/compile/WhereCompilerTest.java      |   26 +-
 .../phoenix/execute/CorrelatePlanTest.java      |   10 +-
 .../execute/LiteralResultIteratorPlanTest.java  |   10 +-
 .../phoenix/execute/UnnestArrayPlanTest.java    |    5 +-
 .../expression/ColumnExpressionTest.java        |   16 +-
 .../phoenix/index/IndexMaintainerTest.java      |    5 +-
 .../iterate/AggregateResultScannerTest.java     |    2 +-
 .../query/BaseConnectionlessQueryTest.java      |   18 +-
 .../phoenix/query/ConnectionlessTest.java       |    2 -
 .../java/org/apache/phoenix/util/TestUtil.java  |   43 +-
 phoenix-protocol/src/main/PTable.proto          |    8 +
 115 files changed, 5140 insertions(+), 984 deletions(-)
----------------------------------------------------------------------
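
The machinery this commit exercises is a per-column-family column qualifier counter: every new key-value column gets the next integer qualifier for its family, starting at ENCODED_CQ_COUNTER_INITIAL_VALUE, instead of using the column name bytes as the HBase qualifier. The counter lives on the base table and is persisted in SYSTEM.CATALOG (COLUMN_QUALIFIER_COUNTER), which is why the new AlterTableIT tests below expect views to report a null counter and expect view DDL to bump the base table's sequence number. A minimal standalone sketch of that bookkeeping (plain Java, not Phoenix's actual EncodedCQCounter; the initial value 11 and the family names are placeholders):

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch only: hands out integer column qualifiers per column
// family, mirroring what the AlterTableIT assertions below read back from
// PTable.getEncodedCQCounter() and from SYSTEM.CATALOG.
public class EncodedCqCounterSketch {
    private static final int INITIAL_VALUE = 11; // placeholder, not Phoenix's constant
    private final Map<String, Integer> counters = new HashMap<>();

    // Qualifier to assign to the next column of this family; advances the counter.
    public int assignQualifier(String columnFamily) {
        int next = counters.getOrDefault(columnFamily, INITIAL_VALUE);
        counters.put(columnFamily, next + 1);
        return next;
    }

    // Current counter value for a family.
    public Integer getValue(String columnFamily) {
        return counters.getOrDefault(columnFamily, INITIAL_VALUE);
    }

    public static void main(String[] args) {
        EncodedCqCounterSketch counter = new EncodedCqCounterSketch();
        System.out.println("KV1    -> " + counter.assignQualifier("CF1")); // 11
        System.out.println("COL4   -> " + counter.assignQualifier("CF1")); // 12
        System.out.println("B.COL6 -> " + counter.assignQualifier("B"));   // 11
        System.out.println("counter(CF1) = " + counter.getValue("CF1"));   // 13
    }
}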


http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 48f4217..507108d 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -18,6 +18,15 @@
 package org.apache.phoenix.end2end;
 
 import static org.apache.hadoop.hbase.HColumnDescriptor.DEFAULT_REPLICATION_SCOPE;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_FAMILY;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_NAME;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_QUALIFIER_COUNTER;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ENCODED_COLUMN_QUALIFIER;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_NAME;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SCHEM;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SEQ_NUM;
+import static org.apache.phoenix.query.QueryConstants.DEFAULT_COLUMN_FAMILY;
+import static org.apache.phoenix.query.QueryConstants.ENCODED_CQ_COUNTER_INITIAL_VALUE;
 import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.apache.phoenix.util.TestUtil.closeConnection;
 import static org.apache.phoenix.util.TestUtil.closeStatement;
@@ -53,12 +62,14 @@ import org.apache.phoenix.query.BaseTest;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.PTable.EncodedCQCounter;
 import org.apache.phoenix.schema.PTableKey;
 import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.util.IndexUtil;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 
 /**
@@ -231,8 +242,18 @@ public class AlterTableIT extends ParallelStatsDisabledIT {
         assertEquals(exists, rs.next());
     }
 
+    
+    @Test
+    public void testDropIndexedColumnImmutableIndex() throws Exception {
+        helpTestDropIndexedColumn(true);
+    }
+    
     @Test
-    public void testDropIndexedColumn() throws Exception {
+    public void testDropIndexedColumnMutableIndex() throws Exception {
+        helpTestDropIndexedColumn(false);
+    }
+    
+    private void helpTestDropIndexedColumn(boolean immutable) throws Exception {
         String query;
         ResultSet rs;
         PreparedStatement stmt;
@@ -244,7 +265,7 @@ public class AlterTableIT extends ParallelStatsDisabledIT {
         // make sure that the tables are empty, but reachable
         conn.createStatement().execute(
           "CREATE TABLE " + dataTableFullName
-              + " (k VARCHAR NOT NULL PRIMARY KEY, v1 VARCHAR, v2 VARCHAR)");
+              + " (k VARCHAR NOT NULL PRIMARY KEY, v1 VARCHAR, v2 VARCHAR) " + (immutable ? "IMMUTABLE_ROWS = true" : ""));
         query = "SELECT * FROM " + dataTableFullName;
         rs = conn.createStatement().executeQuery(query);
         assertFalse(rs.next());
@@ -2118,8 +2139,8 @@ public class AlterTableIT extends ParallelStatsDisabledIT {
             PhoenixConnection phxConn = conn.unwrap(PhoenixConnection.class); 
             PTable table = phxConn.getTable(new PTableKey(phxConn.getTenantId(), dataTableFullName));
             // Assert that the column shows up as row time stamp in the cache.
-            assertTrue(table.getColumn("PK1").isRowTimestamp());
-            assertFalse(table.getColumn("PK2").isRowTimestamp());
+            assertTrue(table.getPColumnForColumnName("PK1").isRowTimestamp());
+            assertFalse(table.getPColumnForColumnName("PK2").isRowTimestamp());
             assertIsRowTimestampSet(schemaName, dataTableName, "PK1");
             
             String dataTableName2 = BaseTest.generateUniqueName();
@@ -2127,18 +2148,17 @@ public class AlterTableIT extends ParallelStatsDisabledIT {
             conn.createStatement().execute("CREATE TABLE " + dataTableFullName2 + " (PK1 VARCHAR, PK2 DATE PRIMARY KEY ROW_TIMESTAMP, KV1 VARCHAR, KV2 INTEGER)");
             table = phxConn.getTable(new PTableKey(phxConn.getTenantId(), dataTableFullName2));
             // Assert that the column shows up as row time stamp in the cache.
-            assertFalse(table.getColumn("PK1").isRowTimestamp());
-            assertTrue(table.getColumn("PK2").isRowTimestamp());
+            assertFalse(table.getPColumnForColumnName("PK1").isRowTimestamp());
+            assertTrue(table.getPColumnForColumnName("PK2").isRowTimestamp());
             assertIsRowTimestampSet(schemaName, dataTableName2, "PK2");
             
             // Create an index on a table has a row time stamp pk column. The column should show up as a row time stamp column for the index too. 
             conn.createStatement().execute("CREATE INDEX " + indexTableName + "  ON " + dataTableFullName2 + " (KV1) include (KV2)");
             PTable indexTable = phxConn.getTable(new PTableKey(phxConn.getTenantId(), indexTableFullName));
-            String indexColName = IndexUtil.getIndexColumnName(table.getColumn("PK2"));
+            String indexColName = IndexUtil.getIndexColumnName(table.getPColumnForColumnName("PK2"));
             // Assert that the column shows up as row time stamp in the cache.
-            assertTrue(indexTable.getColumn(indexColName).isRowTimestamp());
+            assertTrue(indexTable.getPColumnForColumnName(indexColName).isRowTimestamp());
             assertIsRowTimestampSet(schemaName, indexTableName, indexColName);
-            
             String viewTableName2 = dataTableName2 + "_VIEW";
             String viewTableFullName2 = SchemaUtil.getTableName(schemaName, viewTableName2);
             // Creating a view with a row_timestamp column in its pk constraint is not allowed
@@ -2207,5 +2227,217 @@ public class AlterTableIT extends ParallelStatsDisabledIT {
                }
        }
        
+       @Test
+       public void testClientAndServerMetadataForEncodedColumns() throws Exception {
+           String schemaName = "XYZ";
+           String baseTableName = generateUniqueName();
+           String viewName = generateUniqueName();
+           String fullTableName = schemaName + "." + baseTableName;
+           String fullViewName = schemaName + "." + viewName;
+           try (Connection conn = DriverManager.getConnection(getUrl())) {
+               PhoenixConnection phxConn = conn.unwrap(PhoenixConnection.class);
+               conn.createStatement().execute("CREATE TABLE IF NOT EXISTS " + fullTableName + " ("
+                       + " ID char(1) NOT NULL,"
+                       + " COL1 integer NOT NULL,"
+                       + " COL2 bigint NOT NULL,"
+                       + " KV1 VARCHAR"
+                       + " CONSTRAINT NAME_PK PRIMARY KEY (ID, COL1, COL2)"
+                       + " )");
+               PTable baseTable = phxConn.getTable(new PTableKey(phxConn.getTenantId(), fullTableName));
+               long initBaseTableSeqNumber = baseTable.getSequenceNumber(); 
+
+               // assert that the client side cache is updated.
+               EncodedCQCounter cqCounter = baseTable.getEncodedCQCounter();
+               assertEquals((Integer)(ENCODED_CQ_COUNTER_INITIAL_VALUE + 1), cqCounter.getValue(QueryConstants.DEFAULT_COLUMN_FAMILY));
+               
+               
+               // assert that the server side metadata is updated correctly.
+               assertEncodedCQCounter(DEFAULT_COLUMN_FAMILY, schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 1, true);
+               assertEncodedCQValue(DEFAULT_COLUMN_FAMILY, "KV1", schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE);
+               assertSequenceNumber(schemaName, baseTableName, initBaseTableSeqNumber);
+
+               // now create a view and validate client and server side metadata
+               String viewDDL = "CREATE VIEW " + fullViewName + " ( VIEW_COL1 INTEGER, A.VIEW_COL2 VARCHAR ) AS SELECT * FROM " + fullTableName;
+               conn.createStatement().execute(viewDDL);
+               baseTable = phxConn.getTable(new PTableKey(phxConn.getTenantId(), fullTableName));
+               PTable view = phxConn.getTable(new PTableKey(phxConn.getTenantId(), fullViewName));
+
+               // verify that the client side cache is updated. Base table's cq counters should be updated.
+               assertEquals((Integer)(ENCODED_CQ_COUNTER_INITIAL_VALUE + 2), baseTable.getEncodedCQCounter().getValue(DEFAULT_COLUMN_FAMILY));
+               assertEquals((Integer)(ENCODED_CQ_COUNTER_INITIAL_VALUE + 1), baseTable.getEncodedCQCounter().getValue("A"));
+               assertNull("A view should always have the null cq counter", view.getEncodedCQCounter().getValue(DEFAULT_COLUMN_FAMILY));
+               
+               // assert that the server side metadata for the base table and the view is also updated correctly.
+               assertEncodedCQCounter(DEFAULT_COLUMN_FAMILY, schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 2, true);
+               assertEncodedCQCounter("A", schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 1, true);
+               assertEncodedCQValue(DEFAULT_COLUMN_FAMILY, "VIEW_COL1", schemaName, viewName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 1);
+               assertEncodedCQValue("A", "VIEW_COL2", schemaName, viewName, ENCODED_CQ_COUNTER_INITIAL_VALUE);
+               assertSequenceNumber(schemaName, baseTableName, initBaseTableSeqNumber + 1);
+               assertSequenceNumber(schemaName, viewName, PTable.INITIAL_SEQ_NUM);
+           }
+       }
+       
+       @Test
+    public void testAddingColumnsToTablesAndViewsWithEncodedColumns() throws Exception {
+        String schemaName = "XYZ";
+        String baseTableName = generateUniqueName();
+        String viewName = generateUniqueName();
+        String fullTableName = schemaName + "." + baseTableName;
+        String fullViewName = schemaName + "." + viewName;
+        Properties props = new Properties();
+        props.put(QueryServices.IS_NAMESPACE_MAPPING_ENABLED, Boolean.toString(true));
+        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+            conn.createStatement().execute("CREATE SCHEMA " + schemaName);
+            PhoenixConnection phxConn = conn.unwrap(PhoenixConnection.class);
+            conn.createStatement().execute("CREATE TABLE " + fullTableName + " ("
+                    + " ID char(1) NOT NULL,"
+                    + " COL1 integer NOT NULL,"
+                    + " COL2 bigint NOT NULL,"
+                    + " CONSTRAINT NAME_PK PRIMARY KEY (ID, COL1, COL2)"
+                    + " )");
+            PTable baseTable = phxConn.getTable(new PTableKey(phxConn.getTenantId(), fullTableName));
+            long initBaseTableSeqNumber = baseTable.getSequenceNumber(); 
+
+            // Add a column to the base table and see if the client and server metadata is updated correctly
+            String alterDDL = "ALTER TABLE " + fullTableName + " ADD COL3 VARCHAR PRIMARY KEY, COL4 INTEGER, COL5 VARCHAR, B.COL6 DECIMAL (10, 2)";
+            conn.createStatement().execute(alterDDL);
+
+            // assert that the client side cache is updated.
+            baseTable = phxConn.getTable(new PTableKey(phxConn.getTenantId(), fullTableName));
+            EncodedCQCounter encodedCqCounter = baseTable.getEncodedCQCounter();
+            assertEquals((Integer)(ENCODED_CQ_COUNTER_INITIAL_VALUE + 2), encodedCqCounter.getValue(DEFAULT_COLUMN_FAMILY));
+            assertEquals((Integer)(ENCODED_CQ_COUNTER_INITIAL_VALUE + 1), encodedCqCounter.getValue("B"));
+            
+            // assert that the server side metadata is updated correctly.
+            assertEncodedCQCounter(DEFAULT_COLUMN_FAMILY, schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 2, true);
+            assertEncodedCQCounter("B", schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 1, true);
+            
+            // assert that the server side metadata for columns is updated correctly.
+            assertEncodedCQValue(DEFAULT_COLUMN_FAMILY, "COL4", schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE);
+            assertEncodedCQValue(DEFAULT_COLUMN_FAMILY, "COL5", schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 1);
+            assertEncodedCQValue("B", "COL6", schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE);
+            assertSequenceNumber(schemaName, baseTableName, initBaseTableSeqNumber + 1);
+
+            // Create a view
+            String viewDDL = "CREATE VIEW " + fullViewName + " ( VIEW_COL1 INTEGER, A.VIEW_COL2 VARCHAR ) AS SELECT * FROM " + fullTableName;
+            conn.createStatement().execute(viewDDL);
+            
+            // assert that the server side metadata is updated correctly.
+            assertEncodedCQCounter(DEFAULT_COLUMN_FAMILY, schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 3, true);
+            assertEncodedCQCounter("A", schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 1, true);
+            assertEncodedCQCounter("B", schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 1, true);
+            
+            // assert that the server side metadata for columns is updated correctly.
+            assertEncodedCQValue(DEFAULT_COLUMN_FAMILY, "VIEW_COL1", schemaName, viewName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 2);
+            assertEncodedCQValue("A", "VIEW_COL2", schemaName, viewName, ENCODED_CQ_COUNTER_INITIAL_VALUE);
+            // Creating a view that adds its own columns should increment the base table's sequence number too.
+            assertSequenceNumber(schemaName, baseTableName, initBaseTableSeqNumber + 2);
+            
+
+            // Add column to the view
+            viewDDL = "ALTER VIEW " + fullViewName + " ADD VIEW_COL3 DECIMAL(10, 2), A.VIEW_COL4 VARCHAR, B.VIEW_COL5 INTEGER";
+            conn.createStatement().execute(viewDDL);
+
+            // assert that the client cache for the base table is updated
+            baseTable = phxConn.getTable(new PTableKey(phxConn.getTenantId(), fullTableName));
+            encodedCqCounter = baseTable.getEncodedCQCounter();
+            assertEquals((Integer)(ENCODED_CQ_COUNTER_INITIAL_VALUE + 4), encodedCqCounter.getValue(DEFAULT_COLUMN_FAMILY));
+            assertEquals((Integer)(ENCODED_CQ_COUNTER_INITIAL_VALUE + 2), encodedCqCounter.getValue("A"));
+            assertEquals((Integer)(ENCODED_CQ_COUNTER_INITIAL_VALUE + 2), encodedCqCounter.getValue("B"));
+            
+            // assert client cache for view
+            PTable view = phxConn.getTable(new PTableKey(phxConn.getTenantId(), fullViewName));
+            encodedCqCounter = view.getEncodedCQCounter();
+            assertNull("A view should always have the column qualifier counter as null", view.getEncodedCQCounter().getValue(DEFAULT_COLUMN_FAMILY));
+            
+            // assert that the server side metadata for the base table and the view is also updated correctly.
+            assertEncodedCQCounter(DEFAULT_COLUMN_FAMILY, schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 4, true);
+            assertEncodedCQCounter("A", schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 2, true);
+            assertEncodedCQCounter("B", schemaName, baseTableName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 2, true);
+            assertEncodedCQValue(DEFAULT_COLUMN_FAMILY, "VIEW_COL1", schemaName, viewName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 2);
+            assertEncodedCQValue(DEFAULT_COLUMN_FAMILY, "VIEW_COL3", schemaName, viewName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 3);
+            assertEncodedCQValue("A", "VIEW_COL2", schemaName, viewName, ENCODED_CQ_COUNTER_INITIAL_VALUE);
+            assertEncodedCQValue("A", "VIEW_COL4", schemaName, viewName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 1);
+            assertEncodedCQValue("B", "VIEW_COL5", schemaName, viewName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 1);
+            // Adding a column to the view should increment the base table's sequence number too since we update the cq counters for column families.
+            assertSequenceNumber(schemaName, baseTableName, initBaseTableSeqNumber + 3);
+            assertSequenceNumber(schemaName, viewName, PTable.INITIAL_SEQ_NUM + 1);
+            
+            // Add column to the base table which doesn't already exist in the view.
+            alterDDL = "ALTER TABLE " + fullTableName + " ADD COL10 VARCHAR, A.COL11 INTEGER";
+            conn.createStatement().execute(alterDDL);
+            baseTable = phxConn.getTable(new PTableKey(phxConn.getTenantId(), fullTableName));
+            
+            // assert that the client cache for the base table is updated 
+            encodedCqCounter = baseTable.getEncodedCQCounter();
+            assertEquals((Integer)(ENCODED_CQ_COUNTER_INITIAL_VALUE + 5), encodedCqCounter.getValue(DEFAULT_COLUMN_FAMILY));
+            assertEquals((Integer)(ENCODED_CQ_COUNTER_INITIAL_VALUE + 3), encodedCqCounter.getValue("A"));
+            assertEquals((Integer)(ENCODED_CQ_COUNTER_INITIAL_VALUE + 2), encodedCqCounter.getValue("B"));
+            
+            // assert client cache for view
+            view = phxConn.getTable(new PTableKey(phxConn.getTenantId(), fullViewName));
+            encodedCqCounter = view.getEncodedCQCounter();
+            assertNull("A view should always have the column qualifier counter as null", view.getEncodedCQCounter().getValue(DEFAULT_COLUMN_FAMILY));
+            
+            // assert that the server side metadata for the base table and the view is also updated correctly.
+            assertEncodedCQCounter(DEFAULT_COLUMN_FAMILY, schemaName, baseTableName, (ENCODED_CQ_COUNTER_INITIAL_VALUE + 5), true);
+            assertEncodedCQCounter("A", schemaName, baseTableName, (ENCODED_CQ_COUNTER_INITIAL_VALUE + 3), true);
+            assertEncodedCQValue(DEFAULT_COLUMN_FAMILY, "COL10", schemaName, viewName, (ENCODED_CQ_COUNTER_INITIAL_VALUE + 4));
+            assertEncodedCQValue("A", "COL11", schemaName, viewName, ENCODED_CQ_COUNTER_INITIAL_VALUE + 2);
+            assertSequenceNumber(schemaName, baseTableName, initBaseTableSeqNumber + 4);
+            assertSequenceNumber(schemaName, viewName, PTable.INITIAL_SEQ_NUM + 2);
+        }
+    }
+       
+       private void assertEncodedCQValue(String columnFamily, String columnName, String schemaName, String tableName, int expectedValue) throws Exception {
+        String query = "SELECT " + ENCODED_COLUMN_QUALIFIER + " FROM SYSTEM.CATALOG WHERE " + TABLE_SCHEM + " = ? AND " + TABLE_NAME
+                + " = ? " + " AND " + COLUMN_FAMILY + " = ?" + " AND " + COLUMN_NAME  + " = ?";
+        try (Connection conn = DriverManager.getConnection(getUrl())) {
+            PreparedStatement stmt = conn.prepareStatement(query);
+            stmt.setString(1, schemaName);
+            stmt.setString(2, tableName);
+            stmt.setString(3, columnFamily);
+            stmt.setString(4, columnName);
+            ResultSet rs = stmt.executeQuery();
+            assertTrue(rs.next());
+            assertEquals(expectedValue, rs.getInt(1));
+            assertFalse(rs.next());
+        }
+    }
+    
+    private void assertEncodedCQCounter(String columnFamily, String schemaName, String tableName, int expectedValue, boolean rowExists) throws Exception {
+        String query = "SELECT " + COLUMN_QUALIFIER_COUNTER + " FROM SYSTEM.CATALOG WHERE " + TABLE_SCHEM + " = ? AND " + TABLE_NAME
+                + " = ? " + " AND " + COLUMN_FAMILY + " = ? AND " + COLUMN_QUALIFIER_COUNTER + " IS NOT NULL";
+        try (Connection conn = DriverManager.getConnection(getUrl())) {
+            PreparedStatement stmt = conn.prepareStatement(query);
+            stmt.setString(1, schemaName);
+            stmt.setString(2, tableName);
+            stmt.setString(3, columnFamily);
+            ResultSet rs = stmt.executeQuery();
+            if (rowExists) {
+                assertTrue(rs.next());
+                assertEquals(expectedValue, rs.getInt(1));
+                assertFalse(rs.next());
+            } else {
+                assertFalse(rs.next());
+            }
+        }
+    }
+    
+    private void assertSequenceNumber(String schemaName, String tableName, long expectedSequenceNumber) throws Exception {
+        String query = "SELECT " + TABLE_SEQ_NUM + " FROM SYSTEM.CATALOG WHERE " + TABLE_SCHEM + " = ? AND " + TABLE_NAME
+                + " = ? AND " +  TABLE_SEQ_NUM + " IS NOT NULL AND " + COLUMN_NAME + " IS NULL AND "
+                + COLUMN_FAMILY + " IS NULL ";
+        try (Connection conn = DriverManager.getConnection(getUrl())) {
+            PreparedStatement stmt = conn.prepareStatement(query);
+            stmt.setString(1, schemaName);
+            stmt.setString(2, tableName);
+            ResultSet rs = stmt.executeQuery();
+            assertTrue(rs.next());
+            assertEquals(expectedSequenceNumber, rs.getInt(1));
+            assertFalse(rs.next());
+        }
+    }
+       
 }
  

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
index e6bf2d2..e4baa20 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
@@ -65,7 +65,7 @@ public class AlterTableWithViewsIT extends ParallelStatsDisabledIT {
     public static Collection<Boolean> data() {
         return Arrays.asList(false, true);
     }
-       
+    
     private String generateDDL(String format) {
         return String.format(format, isMultiTenant ? "TENANT_ID VARCHAR NOT NULL, " : "",
             isMultiTenant ? "TENANT_ID, " : "", isMultiTenant ? "MULTI_TENANT=true" : "");
@@ -679,4 +679,4 @@ public class AlterTableWithViewsIT extends ParallelStatsDisabledIT {
         } 
     }
     
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
index 30bdb41..e367866 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
@@ -18,7 +18,17 @@
 package org.apache.phoenix.end2end;
 
 import static org.apache.hadoop.hbase.HColumnDescriptor.DEFAULT_REPLICATION_SCOPE;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_FAMILY;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_NAME;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_QUALIFIER_COUNTER;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ENCODED_COLUMN_QUALIFIER;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_NAME;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SCHEM;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SEQ_NUM;
+import static org.apache.phoenix.query.QueryConstants.DEFAULT_COLUMN_FAMILY;
+import static org.apache.phoenix.query.QueryConstants.ENCODED_CQ_COUNTER_INITIAL_VALUE;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
@@ -26,6 +36,8 @@ import static org.junit.Assert.fail;
 
 import java.sql.Connection;
 import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.List;
@@ -35,10 +47,15 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.exception.SQLExceptionCode;
+import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.query.KeyRange;
+import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.schema.NewerTableAlreadyExistsException;
+import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.PTable.EncodedCQCounter;
+import org.apache.phoenix.schema.PTableKey;
 import org.apache.phoenix.schema.SchemaNotFoundException;
 import org.apache.phoenix.schema.TableAlreadyExistsException;
 import org.apache.phoenix.util.PhoenixRuntime;
@@ -502,4 +519,9 @@ public class CreateTableIT extends BaseClientManagedTimeIT {
             fail();
         }
     }
+    
+    @Test
+    public void testCreateTableIfNotExistsForEncodedColumnNames() throws Exception {
+        
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixRuntimeIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixRuntimeIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixRuntimeIT.java
index cddca04..91e9370 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixRuntimeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixRuntimeIT.java
@@ -61,11 +61,11 @@ public class PhoenixRuntimeIT extends ParallelStatsDisabledIT {
         scan.setFilter(filter);
         ResultScanner scanner = htable.getScanner(scan);
         Result result = null;
-        ResultTuple tuple = new ResultTuple();
+        ResultTuple tuple;
         Set<String> actualTenantIds = Sets.newHashSetWithExpectedSize(tenantIds.length);
         Set<String> expectedTenantIds = new HashSet<>(Arrays.asList(tenantIds));
         while ((result = scanner.next()) != null) {
-            tuple.setResult(result);
+            tuple = new ResultTuple(result);
             e.evaluate(tuple, ptr);
             String tenantId = (String)PVarchar.INSTANCE.toObject(ptr);
             actualTenantIds.add(tenantId == null ? "" : tenantId);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
index f35484d..861455e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
@@ -107,7 +107,7 @@ public class RowValueConstructorIT extends BaseClientManagedTimeIT {
                 count++;
             }
             // we have 6 values for a_integer present in the atable where a >= 4. x_integer is null for a_integer = 4. So the query should have returned 5 rows.
-            assertTrue(count == 5);   
+            assertEquals(5, count);   
         } finally {
             conn.close();
         }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorIT.java
index 2445948..9ac7be3 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorIT.java
@@ -124,13 +124,13 @@ public class StatsCollectorIT extends ParallelStatsEnabledIT {
         rs = conn.createStatement().executeQuery("EXPLAIN SELECT * FROM " + fullTableName);
         explainPlan = QueryUtil.getExplainPlan(rs);
         assertEquals(
-                "CLIENT 4-CHUNK 1 ROWS 34 BYTES PARALLEL 3-WAY FULL SCAN OVER " + fullTableName + "\n" +
+                "CLIENT 4-CHUNK 1 ROWS 28 BYTES PARALLEL 3-WAY FULL SCAN OVER " + fullTableName + "\n" +
                 "CLIENT MERGE SORT",
                 explainPlan);
         rs = conn.createStatement().executeQuery("EXPLAIN SELECT * FROM " + fullTableName + " WHERE k = 'a'");
         explainPlan = QueryUtil.getExplainPlan(rs);
         assertEquals(
-                "CLIENT 1-CHUNK 1 ROWS 202 BYTES PARALLEL 1-WAY POINT LOOKUP ON 1 KEY OVER " + fullTableName + "\n" +
+                "CLIENT 1-CHUNK 1 ROWS 204 BYTES PARALLEL 1-WAY POINT LOOKUP ON 1 KEY OVER " + fullTableName + "\n" +
                 "CLIENT MERGE SORT",
                 explainPlan);
         
@@ -433,7 +433,7 @@ public class StatsCollectorIT extends ParallelStatsEnabledIT {
         List<KeyRange> keyRanges = getAllSplits(conn, fullTableName);
         assertEquals(26, keyRanges.size());
         rs = conn.createStatement().executeQuery("EXPLAIN SELECT * FROM " + fullTableName);
-        assertEquals("CLIENT 26-CHUNK 25 ROWS 12420 BYTES PARALLEL 1-WAY FULL SCAN OVER " + fullTableName,
+        assertEquals("CLIENT 26-CHUNK 25 ROWS 12530 BYTES PARALLEL 1-WAY FULL SCAN OVER " + fullTableName,
                 QueryUtil.getExplainPlan(rs));
 
         ConnectionQueryServices services = conn.unwrap(PhoenixConnection.class).getQueryServices();
@@ -505,7 +505,7 @@ public class StatsCollectorIT extends ParallelStatsEnabledIT {
             int startIndex = r.nextInt(strings.length);
             int endIndex = r.nextInt(strings.length - startIndex) + startIndex;
             long rows = endIndex - startIndex;
-            long c2Bytes = rows * 35;
+            long c2Bytes = rows * 37;
             System.out.println(rows + ":" + startIndex + ":" + endIndex);
             rs = conn.createStatement().executeQuery(
                     "SELECT COLUMN_FAMILY,SUM(GUIDE_POSTS_ROW_COUNT),SUM(GUIDE_POSTS_WIDTH) from SYSTEM.STATS where PHYSICAL_NAME = '"

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
index 21ccbf4..02ad55b 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
@@ -35,15 +35,26 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.sql.Time;
 import java.sql.Timestamp;
+import java.util.Map.Entry;
+import java.util.NavigableMap;
 import java.util.Properties;
 
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.exception.SQLExceptionCode;
+import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.query.BaseTest;
+import org.apache.phoenix.schema.types.PInteger;
 import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.Test;
 
@@ -831,7 +842,7 @@ public class UpsertValuesIT extends BaseClientManagedTimeIT {
             assertEquals("KV2", rs.getString(2));
             assertFalse(rs.next());
             
-            // Verify now that the data was correctly added to the mutable index too.
+            // Verify now that the data was correctly added to the immutable index too.
             stmt = conn.prepareStatement("SELECT KV2 FROM " + tableName + " WHERE PK2 = ? AND KV1 = ?");
             stmt.setDate(1, upsertedDate);
             stmt.setString(2, "KV1");
@@ -944,6 +955,38 @@ public class UpsertValuesIT extends BaseClientManagedTimeIT {
         }
     }
     
+    public void testColumnQualifierForUpsertedValues() throws Exception {
+        String schemaName = "A";
+        String tableName = "TEST";
+        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
+        String ddl = "create table " + fullTableName 
+                + " (" 
+                + " K varchar primary key,"
+                + " CF1.V1 varchar, CF2.V2 VARCHAR, CF2.V3 VARCHAR)";
+        try (Connection conn = getConnection(nextTimestamp())) {
+            conn.createStatement().execute(ddl);
+        }
+        String dml = "UPSERT INTO " + fullTableName + " VALUES (?, ?, ?, ?)";
+        try (Connection conn = getConnection(nextTimestamp())) {
+            PreparedStatement stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "KEY1");
+            stmt.setString(2, "VALUE1");
+            stmt.setString(3, "VALUE2");
+            stmt.setString(4, "VALUE3");
+            stmt.executeUpdate();
+            conn.commit();
+        }
+        // Issue a raw hbase scan and assert that key values have the expected column qualifiers.
+        try (Connection conn = getConnection(nextTimestamp())) {
+            HTableInterface table = conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(Bytes.toBytes(fullTableName));
+            ResultScanner scanner = table.getScanner(new Scan());
+            Result next = scanner.next();
+            assertTrue(next.containsColumn(Bytes.toBytes("CF1"), PInteger.INSTANCE.toBytes(1)));
+            assertTrue(next.containsColumn(Bytes.toBytes("CF2"), PInteger.INSTANCE.toBytes(2)));
+            assertTrue(next.containsColumn(Bytes.toBytes("CF2"), PInteger.INSTANCE.toBytes(3)));
+        }
+    }
+    
     private static Connection getConnection(long ts) throws SQLException {
         Properties props = PropertiesUtil.deepCopy(TestUtil.TEST_PROPERTIES);
         props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts));
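
For anyone who wants to see the new cell layout outside of a unit test, a throwaway sketch along the lines of testColumnQualifierForUpsertedValues above can dump the raw family/qualifier pairs; the JDBC URL and table name here are placeholders, and it reuses the same getQueryServices().getTable() hook the test uses:

import java.sql.Connection;
import java.sql.DriverManager;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.jdbc.PhoenixConnection;

// Throwaway sketch: print the family and qualifier of every raw HBase cell in
// a Phoenix table. With encoded columns the qualifiers print as integer bytes
// (what the containsColumn assertions above check), not as column names.
public class DumpRawQualifiers {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
            HTableInterface table = conn.unwrap(PhoenixConnection.class)
                    .getQueryServices().getTable(Bytes.toBytes("A.TEST"));
            ResultScanner scanner = table.getScanner(new Scan());
            for (Result r : scanner) {
                for (Cell cell : r.rawCells()) {
                    System.out.println(Bytes.toString(CellUtil.cloneFamily(cell)) + ":"
                            + Bytes.toStringBinary(CellUtil.cloneQualifier(cell)));
                }
            }
            scanner.close();
            table.close();
        }
    }
}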

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropMetadataIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropMetadataIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropMetadataIT.java
index 4e7d06a..5d36a2f 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropMetadataIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropMetadataIT.java
@@ -43,11 +43,13 @@ import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.schema.ColumnNotFoundException;
+import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PNameFactory;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTableKey;
 import org.apache.phoenix.schema.TableNotFoundException;
+import org.apache.phoenix.util.EncodedColumnsUtil;
 import org.apache.phoenix.util.IndexUtil;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.QueryUtil;
@@ -175,6 +177,7 @@ public class DropMetadataIT extends ParallelStatsDisabledIT {
             
             // verify that the local index physical table was *not* dropped
             conn.unwrap(PhoenixConnection.class).getQueryServices().getTableDescriptor(localIndexTablePhysicalName.getBytes());
+            PTable localIndex2 = conn.unwrap(PhoenixConnection.class).getTable(new PTableKey(null, localIndexTableName2));
             
             // there should be a single row belonging to localIndexTableName2 
             Scan scan = new Scan();
@@ -184,7 +187,7 @@ public class DropMetadataIT extends ParallelStatsDisabledIT {
             Result result = results.next();
             assertNotNull(result);
             assertNotNull("localIndexTableName2 row is missing", result.getValue(QueryConstants.DEFAULT_LOCAL_INDEX_COLUMN_FAMILY_BYTES, 
-                IndexUtil.getIndexColumnName(QueryConstants.DEFAULT_COLUMN_FAMILY, "V1").getBytes()));
+                EncodedColumnsUtil.getColumnQualifier(localIndex2.getPColumnForColumnName(IndexUtil.getIndexColumnName(QueryConstants.DEFAULT_COLUMN_FAMILY, "V1")), localIndex2)));
             assertNull(results.next());
         }
     }
@@ -295,9 +298,11 @@ public class DropMetadataIT extends ParallelStatsDisabledIT {
             ResultScanner results = table.getScanner(scan);
             Result result = results.next();
             assertNotNull(result);
-            // there should be a single row belonging to " + viewIndex2 + " 
-            assertNotNull( viewIndex2 + " row is missing", result.getValue(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, 
-                IndexUtil.getIndexColumnName(QueryConstants.DEFAULT_COLUMN_FAMILY, "V4").getBytes()));
+            PTable viewIndexPTable = pconn.getTable(new PTableKey(pconn.getTenantId(), "VIEWINDEX2"));
+            PColumn column = viewIndexPTable.getPColumnForColumnName(IndexUtil.getIndexColumnName(QueryConstants.DEFAULT_COLUMN_FAMILY, "V4"));
+            byte[] cq = EncodedColumnsUtil.getColumnQualifier(column, viewIndexPTable);
+            // there should be a single row belonging to VIEWINDEX2 
+            assertNotNull(viewIndex2 + " row is missing", result.getValue(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, cq));
             assertNull(results.next());
         }
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
index 50548bd..7cca633 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
@@ -870,8 +870,10 @@ public class IndexExpressionIT extends ParallelStatsDisabledIT {
                conn.setAutoCommit(false);
        
                // make sure that the tables are empty, but reachable
-               conn.createStatement().execute(
-                 "CREATE TABLE " + dataTableName + " (k VARCHAR NOT NULL PRIMARY KEY, v1 VARCHAR, v2 VARCHAR)");
+            conn.createStatement().execute(
+                "CREATE TABLE " + dataTableName
+                        + " (k VARCHAR NOT NULL PRIMARY KEY, v1 VARCHAR, v2 VARCHAR)"
+                        + (!mutable ? " IMMUTABLE_ROWS=true" : ""));
                query = "SELECT * FROM " + dataTableName ;
                rs = conn.createStatement().executeQuery(query);
                assertFalse(rs.next());
@@ -1244,7 +1246,16 @@ public class IndexExpressionIT extends ParallelStatsDisabledIT {
     }
     
     @Test
-    public void testViewUsesTableIndex() throws Exception {
+    public void testViewUsesMutableTableIndex() throws Exception {
+        helpTestViewUsesTableIndex(false);
+    }
+    
+    @Test
+    public void testViewUsesImmutableTableIndex() throws Exception {
+        helpTestViewUsesTableIndex(true);
+    }
+    
+    private void helpTestViewUsesTableIndex(boolean immutable) throws Exception {
         Connection conn = DriverManager.getConnection(getUrl());
         try 
         {
@@ -1253,7 +1264,7 @@ public class IndexExpressionIT extends ParallelStatsDisabledIT {
             String viewName = generateUniqueName();
             String indexName2 = generateUniqueName();
                ResultSet rs;
-               String ddl = "CREATE TABLE " + dataTableName + " (k1 INTEGER NOT NULL, k2 INTEGER NOT NULL, s1 VARCHAR, s2 VARCHAR, s3 VARCHAR, s4 VARCHAR CONSTRAINT pk PRIMARY KEY (k1, k2))";
+               String ddl = "CREATE TABLE " + dataTableName + " (k1 INTEGER NOT NULL, k2 INTEGER NOT NULL, s1 VARCHAR, s2 VARCHAR, s3 VARCHAR, s4 VARCHAR CONSTRAINT pk PRIMARY KEY (k1, k2)) " + (immutable ? "IMMUTABLE_ROWS = true" : "");
                conn.createStatement().execute(ddl);
               conn.createStatement().execute("CREATE INDEX " + indexName1 + "  ON " + dataTableName + "(k2, s2, s3, s1)");
               conn.createStatement().execute("CREATE INDEX " + indexName2 + "  ON " + dataTableName + "(k2, s2||'_'||s3, s1, s4)");
@@ -1350,7 +1361,7 @@ public class IndexExpressionIT extends ParallelStatsDisabledIT {
                try {
                        conn.createStatement().execute(
                                        "CREATE TABLE " + dataTableName + " (k VARCHAR NOT NULL PRIMARY KEY, v VARCHAR) "
-                                                       + (mutable ? "IMMUTABLE_ROWS=true" : ""));
+                                                       + (!mutable ? "IMMUTABLE_ROWS=true" : ""));
                        String query = "SELECT * FROM  " + dataTableName;
                        ResultSet rs = conn.createStatement().executeQuery(query);
                        assertFalse(rs.next());

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexIT.java
index cb4310b..521a317 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexIT.java
@@ -807,23 +807,23 @@ public class IndexIT extends ParallelStatsDisabledIT {
             stmt.execute();
             conn.commit();
 
-            // make sure the index is working as expected
-            query = "SELECT * FROM " + fullIndexName;
+            query = "SELECT /*+ NO_INDEX */ * FROM " + testTable;
             rs = conn.createStatement().executeQuery(query);
             assertTrue(rs.next());
-            assertEquals("x", rs.getString(1));
-            assertEquals("1", rs.getString(2));
-            assertEquals("a", rs.getString(3));
+            assertEquals("a", rs.getString(1));
+            assertEquals("x", rs.getString(2));
+            assertEquals("1", rs.getString(3));
             assertTrue(rs.next());
-            assertEquals("y", rs.getString(1));
-            assertEquals("2", rs.getString(2));
-            assertEquals("b", rs.getString(3));
+            assertEquals("b", rs.getString(1));
+            assertEquals("y", rs.getString(2));
+            assertEquals("2", rs.getString(3));
             assertTrue(rs.next());
-            assertEquals("z", rs.getString(1));
-            assertEquals("3", rs.getString(2));
-            assertEquals("c", rs.getString(3));
+            assertEquals("c", rs.getString(1));
+            assertEquals("z", rs.getString(2));
+            assertEquals("3", rs.getString(3));
             assertFalse(rs.next());
-
+            
+            // make sure the index is working as expected
             query = "SELECT * FROM " + testTable;
             rs = conn.createStatement().executeQuery("EXPLAIN " + query);
             if (localIndex) {
@@ -896,7 +896,7 @@ public class IndexIT extends ParallelStatsDisabledIT {
             } else {
                assertEquals("CLIENT PARALLEL 1-WAY RANGE SCAN OVER " + fullIndexName + " ['1']", QueryUtil.getExplainPlan(rs));
             }
-
+            
             rs = conn.createStatement().executeQuery(query);
             assertTrue(rs.next());
             assertEquals("a",rs.getString(1));

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexTestUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexTestUtil.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexTestUtil.java
index ba04ad7..a2888cf 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexTestUtil.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexTestUtil.java
@@ -39,15 +39,15 @@ import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
-import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.ColumnNotFoundException;
 import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PColumnFamily;
-import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.PRow;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.RowKeySchema;
 import org.apache.phoenix.schema.SortOrder;
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.util.EncodedColumnsUtil;
 import org.apache.phoenix.util.IndexUtil;
 import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.SchemaUtil;
@@ -115,7 +115,7 @@ public class IndexTestUtil {
         while ((hasValue = dataRowKeySchema.next(ptr, i, maxOffset)) != null) {
             if (hasValue) {
                 PColumn dataColumn = dataPKColumns.get(i);
-                PColumn indexColumn = indexTable.getColumn(IndexUtil.getIndexColumnName(dataColumn));
+                PColumn indexColumn = indexTable.getPColumnForColumnName(IndexUtil.getIndexColumnName(dataColumn));
                 coerceDataValueToIndexValue(dataColumn, indexColumn, ptr);
                 indexValues[indexColumn.getPosition()-indexOffset] = ptr.copyBytes();
             }
@@ -135,10 +135,11 @@ public class IndexTestUtil {
                     for (Cell kv : entry.getValue()) {
                         @SuppressWarnings("deprecation")
                         byte[] cq = kv.getQualifier();
-                        if (Bytes.compareTo(QueryConstants.EMPTY_COLUMN_BYTES, cq) != 0) {
+                        byte[] emptyKVQualifier = EncodedColumnsUtil.getEmptyKeyValueInfo(dataTable).getFirst();
+                        if (Bytes.compareTo(emptyKVQualifier, cq) != 0) {
                             try {
-                                PColumn dataColumn = family.getColumn(cq);
-                                PColumn indexColumn = indexTable.getColumn(IndexUtil.getIndexColumnName(family.getName().getString(), dataColumn.getName().getString()));
+                                PColumn dataColumn = family.getPColumnForColumnQualifier(cq);
+                                PColumn indexColumn = indexTable.getPColumnForColumnName(IndexUtil.getIndexColumnName(family.getName().getString(), dataColumn.getName().getString()));
                                 ptr.set(kv.getValueArray(),kv.getValueOffset(),kv.getValueLength());
                                 coerceDataValueToIndexValue(dataColumn, indexColumn, ptr);
                                 indexValues[indexPKColumns.indexOf(indexColumn)-indexOffset] = ptr.copyBytes();
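
A pattern that repeats from here through the rest of the commit (and across most of the compile/ and schema/ entries in the diffstat) is replacing getColumn(...) with getPColumnForColumnName(...) and adding a getPColumnForColumnQualifier(...) lookup keyed on the encoded qualifier bytes, as in the hunk above. In effect each column family now needs a second index over the same columns; a rough sketch of that shape (plain Java with JDK types only, not the actual PColumnFamily API):

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

// Sketch of a dual-keyed column lookup: by column name and by encoded
// column qualifier bytes. ByteBuffer is used only to get content-based
// equals/hashCode for the byte[] keys.
public class ColumnLookupSketch {
    public static final class Column {
        final String name;
        final byte[] qualifier;
        Column(String name, byte[] qualifier) { this.name = name; this.qualifier = qualifier; }
    }

    private final Map<String, Column> byName = new HashMap<>();
    private final Map<ByteBuffer, Column> byQualifier = new HashMap<>();

    public void add(Column c) {
        byName.put(c.name, c);
        byQualifier.put(ByteBuffer.wrap(c.qualifier), c);
    }

    // analogous to getPColumnForColumnName(String)
    public Column getColumnForName(String name) { return byName.get(name); }

    // analogous to getPColumnForColumnQualifier(byte[])
    public Column getColumnForQualifier(byte[] cq) { return byQualifier.get(ByteBuffer.wrap(cq)); }
}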

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/main/java/org/apache/phoenix/compile/CreateTableCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/CreateTableCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/CreateTableCompiler.java
index 3928f66..70b04d5 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/CreateTableCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/CreateTableCompiler.java
@@ -300,7 +300,7 @@ public class CreateTableCompiler {
         @Override
         public Boolean visit(KeyValueColumnExpression node) {
             try {
-                this.position = table.getColumnFamily(node.getColumnFamily()).getColumn(node.getColumnName()).getPosition();
+                this.position = table.getColumnFamily(node.getColumnFamily()).getPColumnForColumnQualifier(node.getColumnQualifier()).getPosition();
             } catch (SQLException e) {
                 throw new RuntimeException(e); // Impossible
             }

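The switch above from getColumn(columnName) to getPColumnForColumnQualifier(columnQualifier) reflects that, with encoded columns, a cell's qualifier is no longer the column's name, so lookups by name and by qualifier become distinct operations. A rough sketch under that assumption (class and method names are hypothetical, not the Phoenix API):

import java.util.HashMap;
import java.util.Map;

public class QualifierLookupSketch {
    static final class Column {
        final String name;
        final int position;
        Column(String name, int position) { this.name = name; this.position = position; }
    }

    private final Map<String, Column> byName = new HashMap<>();
    private final Map<Integer, Column> byEncodedQualifier = new HashMap<>();

    // Register a column under its name and, if present, its encoded qualifier.
    void add(Column c, Integer encodedQualifier) {
        byName.put(c.name, c);
        if (encodedQualifier != null) {
            byEncodedQualifier.put(encodedQualifier, c);
        }
    }

    Column forName(String name) { return byName.get(name); }
    Column forQualifier(int encodedQualifier) { return byEncodedQualifier.get(encodedQualifier); }

    public static void main(String[] args) {
        QualifierLookupSketch family = new QualifierLookupSketch();
        family.add(new Column("FIRST_NAME", 1), 11);
        System.out.println(family.forQualifier(11).position); // prints 1
    }
}
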
http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
index c05918b..bce94c2 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
@@ -117,12 +117,26 @@ import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PDatum;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.IndexType;
+import org.apache.phoenix.schema.PTable.StorageScheme;
 import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.RowKeyValueAccessor;
 import org.apache.phoenix.schema.SortOrder;
 import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.schema.TypeMismatchException;
-import org.apache.phoenix.schema.types.*;
+import org.apache.phoenix.schema.types.PArrayDataType;
+import org.apache.phoenix.schema.types.PBoolean;
+import org.apache.phoenix.schema.types.PChar;
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDate;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.apache.phoenix.schema.types.PTimestamp;
+import org.apache.phoenix.schema.types.PUnsignedTimestamp;
+import org.apache.phoenix.schema.types.PVarbinary;
+import org.apache.phoenix.schema.types.PVarchar;
+import org.apache.phoenix.schema.types.PhoenixArray;
+import org.apache.phoenix.util.EncodedColumnsUtil;
 import org.apache.phoenix.util.ExpressionUtil;
 import org.apache.phoenix.util.IndexUtil;
 import org.apache.phoenix.util.SchemaUtil;
@@ -403,7 +417,7 @@ public class ExpressionCompiler extends UnsupportedAllParseNodeVisitor<Expressio
             return LiteralExpression.newConstant(column.getDataType().toObject(ptr), column.getDataType());
         }
         if (tableRef.equals(context.getCurrentTable()) && !SchemaUtil.isPKColumn(column)) { // project only kv columns
-            addColumn(column);
+            EncodedColumnsUtil.setColumns(column, tableRef.getTable(), context.getScan());
        }
         Expression expression = ref.newColumnExpression(node.isTableNameCaseSensitive(), node.isCaseSensitive());
         Expression wrappedExpression = wrapGroupByExpression(expression);

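EncodedColumnsUtil.setColumns above takes over the old scan.addColumn call so that the qualifier requested from HBase matches the table's storage scheme. A hedged sketch of what such a helper could do, written against the stock HBase client Scan API; the selection logic is an assumption for illustration, not the actual Phoenix implementation:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class SetColumnsSketch {
    enum StorageScheme { NON_ENCODED_COLUMN_NAMES, ENCODED_COLUMN_NAMES }

    // Pick the qualifier to request based on whether the table stores columns
    // under encoded numeric qualifiers or under their plain names.
    static void setColumn(Scan scan, StorageScheme scheme, byte[] family,
                          String columnName, byte[] encodedQualifier) {
        if (scheme == StorageScheme.ENCODED_COLUMN_NAMES && encodedQualifier != null) {
            scan.addColumn(family, encodedQualifier);          // request the encoded qualifier
        } else {
            scan.addColumn(family, Bytes.toBytes(columnName)); // legacy: name is the qualifier
        }
    }

    public static void main(String[] args) {
        Scan scan = new Scan();
        setColumn(scan, StorageScheme.ENCODED_COLUMN_NAMES,
                Bytes.toBytes("0"), "FIRST_NAME", Bytes.toBytes(11));
        System.out.println(scan.getFamilyMap()); // one family, one qualifier requested
    }
}
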
http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
index 8e4d9aa..0e6ffb6 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
@@ -71,6 +71,7 @@ import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PNameFactory;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.IndexType;
+import org.apache.phoenix.schema.PTable.StorageScheme;
 import org.apache.phoenix.schema.PTableImpl;
 import org.apache.phoenix.schema.PTableKey;
 import org.apache.phoenix.schema.PTableType;
@@ -125,10 +126,12 @@ public class FromCompiler {
             throw new ColumnNotFoundException(schemaName, tableName, null, colName);
         }
         
+        @Override
         public PFunction resolveFunction(String functionName) throws SQLException {
             throw new FunctionNotFoundException(functionName);
         }
 
+        @Override
         public boolean hasUDFs() {
             return false;
         }
@@ -185,7 +188,8 @@ public class FromCompiler {
                     if (htable != null) Closeables.closeQuietly(htable);
                 }
                 tableNode = NamedTableNode.create(null, baseTable, statement.getColumnDefs());
-                return new SingleTableColumnResolver(connection, tableNode, e.getTimeStamp(), new HashMap<String, UDFParseNode>(1), false);
+                //TODO: samarth confirm if passing false here is the right thing to do. I think it is since it is a mapped view, but still.
+                return new SingleTableColumnResolver(connection, tableNode, e.getTimeStamp(), new HashMap<String, UDFParseNode>(1), false, false);
             }
             throw e;
         }
@@ -257,7 +261,7 @@ public class FromCompiler {
             Expression sourceExpression = projector.getColumnProjector(column.getPosition()).getExpression();
             PColumnImpl projectedColumn = new PColumnImpl(column.getName(), column.getFamilyName(),
                     sourceExpression.getDataType(), sourceExpression.getMaxLength(), sourceExpression.getScale(), sourceExpression.isNullable(),
-                    column.getPosition(), sourceExpression.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced(), column.getExpressionStr(), column.isRowTimestamp(), column.isDynamic());
+                    column.getPosition(), sourceExpression.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced(), column.getExpressionStr(), column.isRowTimestamp(), column.isDynamic(), column.getEncodedColumnQualifier());
             projectedColumns.add(projectedColumn);
         }
         PTable t = PTableImpl.makePTable(table, projectedColumns);
@@ -332,26 +336,27 @@ public class FromCompiler {
        private final String alias;
         private final List<PSchema> schemas;
 
-       public SingleTableColumnResolver(PhoenixConnection connection, NamedTableNode table, long timeStamp, Map<String, UDFParseNode> udfParseNodes, boolean isNamespaceMapped) throws SQLException  {
-           super(connection, 0, false, udfParseNodes);
-           List<PColumnFamily> families = Lists.newArrayListWithExpectedSize(table.getDynamicColumns().size());
-           for (ColumnDef def : table.getDynamicColumns()) {
-               if (def.getColumnDefName().getFamilyName() != null) {
-                   families.add(new PColumnFamilyImpl(PNameFactory.newName(def.getColumnDefName().getFamilyName()),Collections.<PColumn>emptyList()));
-               }
+        public SingleTableColumnResolver(PhoenixConnection connection, NamedTableNode table, long timeStamp, Map<String, UDFParseNode> udfParseNodes, boolean isNamespaceMapped, boolean useEncodedColumnNames) throws SQLException {
+            super(connection, 0, false, udfParseNodes);
+            List<PColumnFamily> families = Lists.newArrayListWithExpectedSize(table.getDynamicColumns().size());
+            for (ColumnDef def : table.getDynamicColumns()) {
+                if (def.getColumnDefName().getFamilyName() != null) {
+                    families.add(new PColumnFamilyImpl(PNameFactory.newName(def.getColumnDefName().getFamilyName()),Collections.<PColumn>emptyList(), useEncodedColumnNames));
+                }
             }
             Long scn = connection.getSCN();
             String schema = table.getName().getSchemaName();
             if (connection.getSchema() != null) {
                 schema = schema != null ? schema : connection.getSchema();
             }
-           PTable theTable = new PTableImpl(connection.getTenantId(), schema, table.getName().getTableName(),
+            //TODO: samarth should we change the ptableimpl constructor here to set non-encoded column name scheme and null counter
+            PTable theTable = new PTableImpl(connection.getTenantId(), schema, table.getName().getTableName(),
                     scn == null ? HConstants.LATEST_TIMESTAMP : scn, families, isNamespaceMapped);
-           theTable = this.addDynamicColumns(table.getDynamicColumns(), theTable);
-           alias = null;
-           tableRefs = ImmutableList.of(new TableRef(alias, theTable, timeStamp, !table.getDynamicColumns().isEmpty()));
-           schemas = ImmutableList.of(new PSchema(theTable.getSchemaName().toString(), timeStamp));
-       }
+            theTable = this.addDynamicColumns(table.getDynamicColumns(), theTable);
+            alias = null;
+            tableRefs = ImmutableList.of(new TableRef(alias, theTable, timeStamp, !table.getDynamicColumns().isEmpty()));
+            schemas = ImmutableList.of(new PSchema(theTable.getSchemaName().toString(), timeStamp));
+        }
 
         public SingleTableColumnResolver(PhoenixConnection connection, NamedTableNode tableNode, boolean updateCacheImmediately) throws SQLException {
             this(connection, tableNode, updateCacheImmediately, 0, new HashMap<String,UDFParseNode>(1));
@@ -447,8 +452,8 @@ public class FromCompiler {
 
                        }
                PColumn column = resolveCF
-                       ? tableRef.getTable().getColumnFamily(tableName).getColumn(colName)
-                               : tableRef.getTable().getColumn(colName);
+                       ? tableRef.getTable().getColumnFamily(tableName).getPColumnForColumnName(colName)
+                               : tableRef.getTable().getPColumnForColumnName(colName);
             return new ColumnRef(tableRef, column.getPosition());
                }
 
@@ -671,8 +676,9 @@ public class FromCompiler {
                         theTable.getColumnFamily(family); // Verifies that column family exists
                         familyName = PNameFactory.newName(family);
                     }
+                    // Dynamic columns don't have an encoded column name
                     allcolumns.add(new PColumnImpl(name, familyName, dynColumn.getDataType(), dynColumn.getMaxLength(),
-                            dynColumn.getScale(), dynColumn.isNull(), position, dynColumn.getSortOrder(), dynColumn.getArraySize(), null, false, dynColumn.getExpression(), false, true));
+                            dynColumn.getScale(), dynColumn.isNull(), position, dynColumn.getSortOrder(), dynColumn.getArraySize(), null, false, dynColumn.getExpression(), false, true, null));
                     position++;
                 }
                 theTable = PTableImpl.makePTable(theTable, allcolumns);
@@ -774,16 +780,18 @@ public class FromCompiler {
                     // referenced by an outer wild-card select.
                     alias = String.valueOf(position);
                 }
+                //TODO: samarth confirm this is the right change i.e. using null for column qualifier
                 PColumnImpl column = new PColumnImpl(PNameFactory.newName(alias),
                         PNameFactory.newName(QueryConstants.DEFAULT_COLUMN_FAMILY),
-                        null, 0, 0, true, position++, SortOrder.ASC, null, null, false, null, false, false);
+                        null, 0, 0, true, position++, SortOrder.ASC, null, null, false, null, false, false, null);
                 columns.add(column);
             }
+            //TODO: samarth confirm if the storage scheme and encode cqcounters should be passed as null here. Could it be non-encoded column names?
             PTable t = PTableImpl.makePTable(null, PName.EMPTY_NAME, PName.EMPTY_NAME, PTableType.SUBQUERY, null,
                     MetaDataProtocol.MIN_TABLE_TIMESTAMP, PTable.INITIAL_SEQ_NUM, null, null, columns, null, null,
                     Collections.<PTable> emptyList(), false, Collections.<PName> emptyList(), null, null, false, false,
                     false, null, null, null, false, false, 0, 0L, SchemaUtil
-                            .isNamespaceMappingEnabled(PTableType.SUBQUERY, connection.getQueryServices().getProps()), null, false);
+                            .isNamespaceMappingEnabled(PTableType.SUBQUERY, connection.getQueryServices().getProps()), null, false, StorageScheme.NON_ENCODED_COLUMN_NAMES, PTable.EncodedCQCounter.NULL_COUNTER);
 
             String alias = subselectNode.getAlias();
             TableRef tableRef = new TableRef(alias, t, MetaDataProtocol.MIN_TABLE_TIMESTAMP, false);
@@ -858,7 +866,7 @@ public class FromCompiler {
                 while (iterator.hasNext()) {
                     TableRef tableRef = iterator.next();
                     try {
-                        PColumn column = tableRef.getTable().getColumn(colName);
+                        PColumn column = tableRef.getTable().getPColumnForColumnName(colName);
                         if (theTableRef != null) { throw new AmbiguousColumnException(colName); }
                         theTableRef = tableRef;
                         theColumnPosition = column.getPosition();
@@ -871,12 +879,12 @@
             } else {
                 try {
                     TableRef tableRef = resolveTable(schemaName, tableName);
-                    PColumn column = tableRef.getTable().getColumn(colName);
+                    PColumn column = tableRef.getTable().getPColumnForColumnName(colName);
                     return new ColumnRef(tableRef, column.getPosition());
                 } catch (TableNotFoundException e) {
                     // Try using the tableName as a columnFamily reference instead
                     ColumnFamilyRef cfRef = resolveColumnFamily(schemaName, tableName);
-                    PColumn column = cfRef.getFamily().getColumn(colName);
+                    PColumn column = cfRef.getFamily().getPColumnForColumnName(colName);
                     return new ColumnRef(cfRef.getTableRef(), column.getPosition());
                 }
             }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
index e8c05ca..489b993 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
@@ -76,6 +76,7 @@ import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PNameFactory;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.IndexType;
+import org.apache.phoenix.schema.PTable.StorageScheme;
 import org.apache.phoenix.schema.PTableImpl;
 import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.ProjectedColumn;
@@ -93,6 +94,7 @@ import org.apache.phoenix.schema.types.PTimestamp;
 import org.apache.phoenix.schema.types.PTinyint;
 import org.apache.phoenix.schema.types.PVarbinary;
 import org.apache.phoenix.schema.types.PVarchar;
+import org.apache.phoenix.util.EncodedColumnsUtil;
 import org.apache.phoenix.util.IndexUtil;
 import org.apache.phoenix.util.SchemaUtil;
 
@@ -714,7 +716,7 @@ public class JoinCompiler {
                 if (columnRef.getTableRef().equals(tableRef)
                         && !SchemaUtil.isPKColumn(columnRef.getColumn())
                         && !(columnRef instanceof LocalIndexColumnRef)) {
-                    scan.addColumn(columnRef.getColumn().getFamilyName().getBytes(), columnRef.getColumn().getName().getBytes());
+                       EncodedColumnsUtil.setColumns(columnRef.getColumn(), tableRef.getTable(), scan);
                 }
             }
         }
@@ -1301,7 +1303,7 @@ public class JoinCompiler {
         if (left.getBucketNum() != null) {
             merged.remove(0);
         }
-        
+        //TODO: samarth should the projected join table always have non-encoded column names? Is this also where we decide, once we start supporting joins, to set the storage scheme correctly?
         return PTableImpl.makePTable(left.getTenantId(), left.getSchemaName(),
                 PNameFactory.newName(SchemaUtil.getTableName(left.getName().getString(), right.getName().getString())),
                 left.getType(), left.getIndexState(), left.getTimeStamp(), left.getSequenceNumber(), left.getPKName(),
@@ -1310,7 +1312,7 @@
                 left.isMultiTenant(), left.getStoreNulls(), left.getViewType(), left.getViewIndexId(),
                 left.getIndexType(), left.rowKeyOrderOptimizable(), left.isTransactional(),
                 left.getUpdateCacheFrequency(), left.getIndexDisableTimestamp(), left.isNamespaceMapped(),
-                left.getAutoPartitionSeqName(), left.isAppendOnlySchema());
+                left.getAutoPartitionSeqName(), left.isAppendOnlySchema(), StorageScheme.NON_ENCODED_COLUMN_NAMES, PTable.EncodedCQCounter.NULL_COUNTER);
     }
 
 }

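The projected join table above is built with StorageScheme.NON_ENCODED_COLUMN_NAMES and PTable.EncodedCQCounter.NULL_COUNTER. An illustrative model of what such a counter amounts to, assuming a per-family next-qualifier map with a no-op instance for tables that do not encode; field and method names are stand-ins, not Phoenix's:

import java.util.HashMap;
import java.util.Map;

public class CQCounterSketch {
    private final Map<String, Integer> counters = new HashMap<>();

    // Next qualifier to assign for a family; null means "this table doesn't encode".
    Integer getNextQualifier(String family) {
        return counters.get(family);
    }

    void increment(String family) {
        counters.merge(family, 1, Integer::sum);
    }

    void setValue(String family, int value) {
        counters.put(family, value);
    }

    // A NULL_COUNTER-like instance: never tracks or hands out qualifiers.
    static final CQCounterSketch NULL_COUNTER = new CQCounterSketch() {
        @Override Integer getNextQualifier(String family) { return null; }
        @Override void increment(String family) { }
        @Override void setValue(String family, int value) { }
    };

    public static void main(String[] args) {
        CQCounterSketch counter = new CQCounterSketch();
        counter.setValue("0", 11);
        counter.increment("0");
        System.out.println(counter.getNextQualifier("0"));                 // 12
        System.out.println(CQCounterSketch.NULL_COUNTER.getNextQualifier("0")); // null
    }
}
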
http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/main/java/org/apache/phoenix/compile/ListJarsQueryPlan.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/ListJarsQueryPlan.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/ListJarsQueryPlan.java
index 2df0671..d7aa13f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ListJarsQueryPlan.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ListJarsQueryPlan.java
@@ -80,7 +80,7 @@ public class ListJarsQueryPlan implements QueryPlan {
         PColumn column =
                 new PColumnImpl(PNameFactory.newName("jar_location"), null,
                         PVarchar.INSTANCE, null, null, false, 0, SortOrder.getDefault(), 0, null,
-                        false, null, false, false);
+                        false, null, false, false, null);
         List<PColumn> columns = new ArrayList<PColumn>();
         columns.add(column);
         Expression expression =
@@ -165,6 +165,7 @@ public class ListJarsQueryPlan implements QueryPlan {
                                 Type.Put.getCode(), HConstants.EMPTY_BYTE_ARRAY);
                     List<Cell> cells = new ArrayList<Cell>(1);
                     cells.add(cell);
+                    //TODO: samarth confirm if passing false is the right thing to do here.
                     return new ResultTuple(Result.create(cells));
                 } catch (IOException e) {
                     throw new SQLException(e);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/main/java/org/apache/phoenix/compile/PostDDLCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/PostDDLCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/PostDDLCompiler.java
index e43b596..45225a8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/PostDDLCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/PostDDLCompiler.java
@@ -50,6 +50,7 @@ import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.schema.types.PLong;
+import org.apache.phoenix.util.EncodedColumnsUtil;
 import org.apache.phoenix.util.ScanUtil;
 import org.apache.phoenix.util.TransactionUtil;
 
@@ -176,8 +177,8 @@ public class PostDDLCompiler {
                             @Override
                             public ColumnRef resolveColumn(String schemaName, String tableName, String colName) throws SQLException {
                                 PColumn column = tableName != null
-                                        ? tableRef.getTable().getColumnFamily(tableName).getColumn(colName)
-                                        : tableRef.getTable().getColumn(colName);
+                                        ? tableRef.getTable().getColumnFamily(tableName).getPColumnForColumnName(colName)
+                                        : tableRef.getTable().getPColumnForColumnName(colName);
                                 return new ColumnRef(tableRef, column.getPosition());
                             }
                             
@@ -213,6 +214,7 @@ public class PostDDLCompiler {
                         ScanUtil.setTimeRange(scan, ts);
                         if (emptyCF != null) {
                             scan.setAttribute(BaseScannerRegionObserver.EMPTY_CF, emptyCF);
+                            scan.setAttribute(BaseScannerRegionObserver.EMPTY_COLUMN_QUALIFIER, EncodedColumnsUtil.getEmptyKeyValueInfo(tableRef.getTable()).getFirst());
                         }
                         ServerCache cache = null;
                         try {
@@ -236,11 +238,12 @@ public class PostDDLCompiler {
                                     // data empty column family to stay the same, while the index empty column family
                                     // changes.
                                     PColumn column = deleteList.get(0);
+                                    byte[] cq = EncodedColumnsUtil.getColumnQualifier(column, tableRef.getTable());
                                     if (emptyCF == null) {
-                                        scan.addColumn(column.getFamilyName().getBytes(), column.getName().getBytes());
+                                        scan.addColumn(column.getFamilyName().getBytes(), cq);
                                     }
                                     scan.setAttribute(BaseScannerRegionObserver.DELETE_CF, column.getFamilyName().getBytes());
-                                    scan.setAttribute(BaseScannerRegionObserver.DELETE_CQ, column.getName().getBytes());
+                                    scan.setAttribute(BaseScannerRegionObserver.DELETE_CQ, cq);
                                 }
                             }
                             List<byte[]> columnFamilies = Lists.newArrayListWithExpectedSize(tableRef.getTable().getColumnFamilies().size());

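The PostDDLCompiler change above also publishes the empty column qualifier to the server as a scan attribute, since that qualifier is no longer a fixed constant. A small sketch of that handshake using the HBase Scan attribute API; the attribute strings and qualifier value below are placeholders for the BaseScannerRegionObserver constants and for whatever getEmptyKeyValueInfo returns:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class EmptyCfAttributeSketch {
    public static void main(String[] args) {
        Scan scan = new Scan();
        byte[] emptyCf = Bytes.toBytes("0");   // illustrative empty column family
        byte[] emptyCq = Bytes.toBytes("_0");  // illustrative empty key value qualifier
        // Both the family and the qualifier of the empty key value are shipped to the
        // server-side observer so it can skip that cell regardless of storage scheme.
        scan.setAttribute("EmptyCF", emptyCf);              // stands in for EMPTY_CF
        scan.setAttribute("EmptyColumnQualifier", emptyCq); // stands in for EMPTY_COLUMN_QUALIFIER
        System.out.println(scan.getAttributesMap().keySet());
    }
}
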
http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java
index 99a9731..25aee35 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java
@@ -88,6 +88,7 @@ import org.apache.phoenix.schema.ValueBitSet;
 import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.util.ByteUtil;
+import org.apache.phoenix.util.EncodedColumnsUtil;
 import org.apache.phoenix.util.IndexUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.SizedUtil;
@@ -217,7 +218,7 @@ public class ProjectionCompiler {
             PColumn indexColumn = null;
             ColumnRef ref = null;
             try {
-                indexColumn = index.getColumn(indexColName);
+                indexColumn = index.getPColumnForColumnName(indexColName);
                 ref = new ColumnRef(tableRef, indexColumn.getPosition());
             } catch (ColumnNotFoundException e) {
                 if (index.getIndexType() == IndexType.LOCAL) {
@@ -289,7 +290,7 @@ public class ProjectionCompiler {
             ColumnRef ref = null;
             String indexColumnFamily = null;
             try {
-                indexColumn = index.getColumn(indexColName);
+                indexColumn = index.getPColumnForColumnName(indexColName);
                 ref = new ColumnRef(tableRef, indexColumn.getPosition());
                 indexColumnFamily = indexColumn.getFamilyName() == null ? null : indexColumn.getFamilyName().getString();
             } catch (ColumnNotFoundException e) {
@@ -484,7 +485,7 @@ public class ProjectionCompiler {
                 }
             } else {
                 for (byte[] cq : entry.getValue()) {
-                    PColumn column = family.getColumn(cq);
+                    PColumn column = family.getPColumnForColumnQualifier(cq);
                     Integer maxLength = column.getMaxLength();
                     int byteSize = column.getDataType().isFixedWidth() ? maxLength == null ? column.getDataType().getByteSize() : maxLength : RowKeySchema.ESTIMATED_VARIABLE_LENGTH_SIZE;
                     estimatedByteSize += SizedUtil.KEY_VALUE_SIZE + estimatedKeySize + byteSize;
@@ -663,7 +664,8 @@ public class ProjectionCompiler {
                      public Void visit(ProjectedColumnExpression expression) {
                          if (expression.getDataType().isArrayType()) {
                              indexProjectedColumns.add(expression);
-                             KeyValueColumnExpression keyValueColumnExpression = new KeyValueColumnExpression(expression.getColumn());
+                             PColumn col = expression.getColumn();
+                             KeyValueColumnExpression keyValueColumnExpression = new KeyValueColumnExpression(col, EncodedColumnsUtil.hasEncodedColumnName(col));
                              indexKVs.add(keyValueColumnExpression);
                              copyOfChildren.set(0, keyValueColumnExpression);
                              Integer count = arrayExpressionCounts.get(expression);

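The new KeyValueColumnExpression(col, hasEncodedColumnName) call above passes along whether the column should be read under its encoded qualifier or under its plain name. A rough, self-contained sketch of that choice; the 4-byte big-endian encoding shown is an assumption for illustration only:

import java.nio.charset.StandardCharsets;

public class ColumnQualifierChoiceSketch {
    static byte[] qualifierFor(String columnName, Integer encodedQualifier, boolean useEncoded) {
        if (useEncoded && encodedQualifier != null) {
            // Illustrative: serialize the numeric qualifier as 4 big-endian bytes.
            int q = encodedQualifier;
            return new byte[] { (byte) (q >>> 24), (byte) (q >>> 16), (byte) (q >>> 8), (byte) q };
        }
        // Legacy behavior: the column name itself is the cell qualifier.
        return columnName.getBytes(StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        System.out.println(qualifierFor("FIRST_NAME", 11, true).length);  // 4
        System.out.println(qualifierFor("FIRST_NAME", 11, false).length); // 10
    }
}
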
http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
index 2258f28..5126c8b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
@@ -94,7 +94,7 @@ public class QueryCompiler {
      */
     private static final String LOAD_COLUMN_FAMILIES_ON_DEMAND_ATTR = "_ondemand_";
     private final PhoenixStatement statement;
-    private final Scan scan;
+       private final Scan scan;
     private final Scan originalScan;
     private final ColumnResolver resolver;
     private final SelectStatement select;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/main/java/org/apache/phoenix/compile/TraceQueryPlan.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/TraceQueryPlan.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/TraceQueryPlan.java
index ed5cda9..10260cb 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/TraceQueryPlan.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/TraceQueryPlan.java
@@ -78,7 +78,7 @@ public class TraceQueryPlan implements QueryPlan {
         PColumn column =
                 new PColumnImpl(PNameFactory.newName(MetricInfo.TRACE.columnName), null,
                         PLong.INSTANCE, null, null, false, 0, SortOrder.getDefault(), 0, null,
-                        false, null, false, false);
+                        false, null, false, false, null);
         List<PColumn> columns = new ArrayList<PColumn>();
         columns.add(column);
         Expression expression =

http://git-wip-us.apache.org/repos/asf/phoenix/blob/23a87989/phoenix-core/src/main/java/org/apache/phoenix/compile/TupleProjectionCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/TupleProjectionCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/TupleProjectionCompiler.java
index 4d3c0cf..9f7de36 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/TupleProjectionCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/TupleProjectionCompiler.java
@@ -17,6 +17,8 @@
  */
 package org.apache.phoenix.compile;
 
+import static org.apache.phoenix.query.QueryConstants.VALUE_COLUMN_FAMILY;
+
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -24,7 +26,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.parse.AliasedNode;
 import org.apache.phoenix.parse.ColumnParseNode;
 import org.apache.phoenix.parse.FamilyWildcardParseNode;
@@ -43,11 +44,14 @@ import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PNameFactory;
 import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.PTable.EncodedCQCounter;
 import org.apache.phoenix.schema.PTable.IndexType;
+import org.apache.phoenix.schema.PTable.StorageScheme;
 import org.apache.phoenix.schema.PTableImpl;
 import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.ProjectedColumn;
 import org.apache.phoenix.schema.TableRef;
+import org.apache.phoenix.util.EncodedColumnsUtil;
 import org.apache.phoenix.util.IndexUtil;
 import org.apache.phoenix.util.SchemaUtil;
 
@@ -136,7 +140,7 @@ public class TupleProjectionCompiler {
             projectedColumns.add(column);
             // Wildcard or FamilyWildcard will be handled by ProjectionCompiler.
             if (!isWildcard && !families.contains(sourceColumn.getFamilyName())) {
-                context.getScan().addColumn(sourceColumn.getFamilyName().getBytes(), sourceColumn.getName().getBytes());
+               EncodedColumnsUtil.setColumns(column, table, context.getScan());
             }
         }
         // add LocalIndexDataColumnRef
@@ -154,9 +158,10 @@ public class TupleProjectionCompiler {
                 null, null, table.isWALDisabled(), table.isMultiTenant(), table.getStoreNulls(), table.getViewType(),
                 table.getViewIndexId(),
                 table.getIndexType(), table.rowKeyOrderOptimizable(), table.isTransactional(), table.getUpdateCacheFrequency(),
-                table.getIndexDisableTimestamp(), table.isNamespaceMapped(), table.getAutoPartitionSeqName(), table.isAppendOnlySchema());
+                table.getIndexDisableTimestamp(), table.isNamespaceMapped(), table.getAutoPartitionSeqName(), table.isAppendOnlySchema(), table.getStorageScheme(), table.getEncodedCQCounter());
     }
-
+    
+    //TODO: samarth does it matter what the storage scheme here is for the projected tables?
     public static PTable createProjectedTable(TableRef tableRef, List<ColumnRef> sourceColumnRefs, boolean retainPKColumns) throws SQLException {
         PTable table = tableRef.getTable();
         boolean hasSaltingColumn = retainPKColumns && table.getBucketNum() != 
null;
@@ -172,17 +177,22 @@ public class TupleProjectionCompiler {
 
             PColumn column = new ProjectedColumn(PNameFactory.newName(aliasedName), 
                     retainPKColumns && SchemaUtil.isPKColumn(sourceColumn) ? 
-                            null : PNameFactory.newName(TupleProjector.VALUE_COLUMN_FAMILY), 
+                            null : PNameFactory.newName(VALUE_COLUMN_FAMILY), 
                     position++, sourceColumn.isNullable(), sourceColumnRef);
             projectedColumns.add(column);
         }
+        StorageScheme storageScheme = table.getStorageScheme();
+        EncodedCQCounter cqCounter = EncodedCQCounter.NULL_COUNTER;
+        if (storageScheme != StorageScheme.NON_ENCODED_COLUMN_NAMES) {
+            cqCounter = EncodedCQCounter.copy(table.getEncodedCQCounter());
+        }
         return PTableImpl.makePTable(table.getTenantId(), PROJECTED_TABLE_SCHEMA, table.getName(), PTableType.PROJECTED,
                 null, table.getTimeStamp(), table.getSequenceNumber(), table.getPKName(),
                 retainPKColumns ? table.getBucketNum() : null, projectedColumns, null, null,
                 Collections.<PTable> emptyList(), table.isImmutableRows(), Collections.<PName> emptyList(), null, null,
                 table.isWALDisabled(), table.isMultiTenant(), table.getStoreNulls(), table.getViewType(),
                 table.getViewIndexId(), null, table.rowKeyOrderOptimizable(), table.isTransactional(),
-                table.getUpdateCacheFrequency(), table.getIndexDisableTimestamp(), table.isNamespaceMapped(), table.getAutoPartitionSeqName(), table.isAppendOnlySchema());
+                table.getUpdateCacheFrequency(), table.getIndexDisableTimestamp(), table.isNamespaceMapped(), table.getAutoPartitionSeqName(), table.isAppendOnlySchema(), storageScheme, cqCounter);
     }
 
     // For extracting column references from single select statement

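The createProjectedTable change above copies the source table's EncodedCQCounter only when the storage scheme actually encodes column names, and otherwise falls back to the NULL_COUNTER. A minimal sketch of that decision, with plain maps standing in for the Phoenix types:

import java.util.HashMap;
import java.util.Map;

public class ProjectedCounterSketch {
    enum StorageScheme { NON_ENCODED_COLUMN_NAMES, ENCODED_COLUMN_NAMES }

    // A projected table either carries a defensive copy of the source counter
    // (like EncodedCQCounter.copy) or an empty no-op counter (like NULL_COUNTER).
    static Map<String, Integer> counterForProjection(StorageScheme scheme,
                                                     Map<String, Integer> sourceCounter) {
        if (scheme != StorageScheme.NON_ENCODED_COLUMN_NAMES) {
            return new HashMap<>(sourceCounter);
        }
        return new HashMap<>();
    }

    public static void main(String[] args) {
        Map<String, Integer> source = new HashMap<>();
        source.put("0", 12);
        System.out.println(counterForProjection(StorageScheme.ENCODED_COLUMN_NAMES, source));     // {0=12}
        System.out.println(counterForProjection(StorageScheme.NON_ENCODED_COLUMN_NAMES, source)); // {}
    }
}
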