http://git-wip-us.apache.org/repos/asf/hive/blob/7ddac02b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
index 5d3ebd6..a230441 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
@@ -1,9 +1,13 @@
 /*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -26,12 +30,14 @@ import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
 import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.parquet.read.DataWritableReadSupport;
 import org.apache.hadoop.hive.ql.io.parquet.serde.ArrayWritableObjectInspector;
+import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
+import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
 import org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.MapWork;
@@ -43,8 +49,8 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.FileSplit;
-import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.parquet.example.data.Group;
 import org.apache.parquet.example.data.simple.SimpleGroupFactory;
@@ -54,9 +60,10 @@ import org.apache.parquet.hadoop.example.GroupReadSupport;
 import org.apache.parquet.hadoop.example.GroupWriteSupport;
 import org.apache.parquet.io.api.Binary;
 import org.apache.parquet.schema.MessageType;
+
 import java.io.IOException;
-import java.math.BigDecimal;
-import java.math.BigInteger;
+import java.sql.Timestamp;
+import java.util.Arrays;
 import java.util.List;
 
 import static junit.framework.Assert.assertTrue;
@@ -77,84 +84,84 @@ public class VectorizedColumnReaderTestBase {
  protected final static Path file = new Path("target/test/TestParquetVectorReader/testParquetFile");
 
   protected static final MessageType schema = parseMessageType(
-    "message hive_schema { "
-      + "required int32 int32_field; "
-      + "required int64 int64_field; "
-      + "required int96 int96_field; "
-      + "required double double_field; "
-      + "required float float_field; "
-      + "required boolean boolean_field; "
-      + "required fixed_len_byte_array(3) flba_field; "
-      + "optional fixed_len_byte_array(1) some_null_field; "
-      + "optional fixed_len_byte_array(1) all_null_field; "
-      + "required binary binary_field; "
-      + "optional binary binary_field_some_null; "
-      + "required binary value (DECIMAL(5,2)); "
-      + "required group struct_field {"
-      + "  required int32 a;\n"
-      + "  required double b;\n"
-      + "}\n"
-      + "optional group nested_struct_field {"
-      + "  optional group nsf {"
-      + "    optional int32 c;\n"
-      + "    optional int32 d;\n"
-      + "  }\n"
-      + "  optional double e;\n"
-      + "}\n"
-      + "optional group struct_field_some_null {"
-      + "  optional int32 f;\n"
-      + "  optional double g;\n"
-      + "}\n"
-      + "optional group map_field (MAP) {\n"
-      + "  repeated group map (MAP_KEY_VALUE) {\n"
-      + "    required binary key;\n"
-      + "    optional binary value;\n"
-      + "  }\n"
-      + "}\n"
-      + "optional group array_list (LIST) {\n"
-      + "  repeated group bag {\n"
-      + "    optional int32 array_element;\n"
-      + "  }\n"
-      + "}\n"
-      + "repeated int32 list_int32_field;"
-      + "repeated int64 list_int64_field;"
-      + "repeated double list_double_field;"
-      + "repeated float list_float_field;"
-      + "repeated boolean list_boolean_field;"
-      + "repeated fixed_len_byte_array(3) list_byte_array_field;"
-      + "repeated binary list_binary_field;"
-      + "repeated binary list_decimal_field (DECIMAL(5,2));"
-      + "repeated binary list_binary_field_for_repeat_test;"
-      + "repeated int32 list_int32_field_for_repeat_test;"
-      + "repeated group map_int32 (MAP_KEY_VALUE) {\n"
-      + "  required int32 key;\n"
-      + "  optional int32 value;\n"
-      + "}\n"
-      + "repeated group map_int64 (MAP_KEY_VALUE) {\n"
-      + "  required int64 key;\n"
-      + "  optional int64 value;\n"
-      + "}\n"
-      + "repeated group map_double (MAP_KEY_VALUE) {\n"
-      + "  required double key;\n"
-      + "  optional double value;\n"
-      + "}\n"
-      + "repeated group map_float (MAP_KEY_VALUE) {\n"
-      + "  required float key;\n"
-      + "  optional float value;\n"
-      + "}\n"
-      + "repeated group map_binary (MAP_KEY_VALUE) {\n"
-      + "  required binary key;\n"
-      + "  optional binary value;\n"
-      + "}\n"
-      + "repeated group map_decimal (MAP_KEY_VALUE) {\n"
-      + "  required binary key (DECIMAL(5,2));\n"
-      + "  optional binary value (DECIMAL(5,2));\n"
-      + "}\n"
-      + "repeated group map_int32_for_repeat_test (MAP_KEY_VALUE) {\n"
-      + "  required int32 key;\n"
-      + "  optional int32 value;\n"
-      + "}\n"
-      + "} ");
+      "message hive_schema { "
+          + "required int32 int32_field; "
+          + "required int64 int64_field; "
+          + "required int96 int96_field; "
+          + "required double double_field; "
+          + "required float float_field; "
+          + "required boolean boolean_field; "
+          + "required fixed_len_byte_array(3) flba_field; "
+          + "optional fixed_len_byte_array(1) some_null_field; "
+          + "optional fixed_len_byte_array(1) all_null_field; "
+          + "required binary binary_field; "
+          + "optional binary binary_field_some_null; "
+          + "required binary value (DECIMAL(5,2)); "
+          + "required group struct_field {"
+          + "  required int32 a;\n"
+          + "  required double b;\n"
+          + "}\n"
+          + "optional group nested_struct_field {"
+          + "  optional group nsf {"
+          + "    optional int32 c;\n"
+          + "    optional int32 d;\n"
+          + "  }\n"
+          + "  optional double e;\n"
+          + "}\n"
+          + "optional group struct_field_some_null {"
+          + "  optional int32 f;\n"
+          + "  optional double g;\n"
+          + "}\n"
+          + "optional group map_field (MAP) {\n"
+          + "  repeated group map (MAP_KEY_VALUE) {\n"
+          + "    required binary key;\n"
+          + "    optional binary value;\n"
+          + "  }\n"
+          + "}\n"
+          + "optional group array_list (LIST) {\n"
+          + "  repeated group bag {\n"
+          + "    optional int32 array_element;\n"
+          + "  }\n"
+          + "}\n"
+          + "repeated int32 list_int32_field;"
+          + "repeated int64 list_int64_field;"
+          + "repeated double list_double_field;"
+          + "repeated float list_float_field;"
+          + "repeated boolean list_boolean_field;"
+          + "repeated fixed_len_byte_array(3) list_byte_array_field;"
+          + "repeated binary list_binary_field;"
+          + "repeated binary list_decimal_field (DECIMAL(5,2));"
+          + "repeated binary list_binary_field_for_repeat_test;"
+          + "repeated int32 list_int32_field_for_repeat_test;"
+          + "repeated group map_int32 (MAP_KEY_VALUE) {\n"
+          + "  required int32 key;\n"
+          + "  optional int32 value;\n"
+          + "}\n"
+          + "repeated group map_int64 (MAP_KEY_VALUE) {\n"
+          + "  required int64 key;\n"
+          + "  optional int64 value;\n"
+          + "}\n"
+          + "repeated group map_double (MAP_KEY_VALUE) {\n"
+          + "  required double key;\n"
+          + "  optional double value;\n"
+          + "}\n"
+          + "repeated group map_float (MAP_KEY_VALUE) {\n"
+          + "  required float key;\n"
+          + "  optional float value;\n"
+          + "}\n"
+          + "repeated group map_binary (MAP_KEY_VALUE) {\n"
+          + "  required binary key;\n"
+          + "  optional binary value;\n"
+          + "}\n"
+          + "repeated group map_decimal (MAP_KEY_VALUE) {\n"
+          + "  required binary key (DECIMAL(5,2));\n"
+          + "  optional binary value (DECIMAL(5,2));\n"
+          + "}\n"
+          + "repeated group map_int32_for_repeat_test (MAP_KEY_VALUE) {\n"
+          + "  required int32 key;\n"
+          + "  optional int32 value;\n"
+          + "}\n"
+          + "} ");
 
   protected static void removeFile() throws IOException {
     FileSystem fs = file.getFileSystem(conf);
@@ -166,73 +173,66 @@ public class VectorizedColumnReaderTestBase {
  protected static ParquetWriter<Group> initWriterFromFile() throws IOException {
     GroupWriteSupport.setSchema(schema, conf);
     return new ParquetWriter<>(
-      file,
-      new GroupWriteSupport(),
-      GZIP, 1024 * 1024, 1024, 1024 * 1024,
-      true, false, PARQUET_1_0, conf);
+        file,
+        new GroupWriteSupport(),
+        GZIP, 1024 * 1024, 1024, 1024 * 1024,
+        true, false, PARQUET_1_0, conf);
   }
 
   protected static int getIntValue(
-    boolean isDictionaryEncoding,
-    int index) {
+      boolean isDictionaryEncoding,
+      int index) {
     return isDictionaryEncoding ? index % UNIQUE_NUM : index;
   }
 
   protected static double getDoubleValue(
-    boolean isDictionaryEncoding,
-    int index) {
+      boolean isDictionaryEncoding,
+      int index) {
     return isDictionaryEncoding ? index % UNIQUE_NUM : index;
   }
 
   protected static long getLongValue(
-    boolean isDictionaryEncoding,
-    int index) {
+      boolean isDictionaryEncoding,
+      int index) {
    return isDictionaryEncoding ? (long) 2 * index % UNIQUE_NUM : (long) 2 * index;
   }
 
   protected static float getFloatValue(
-    boolean isDictionaryEncoding,
-    int index) {
+      boolean isDictionaryEncoding,
+      int index) {
    return (float) (isDictionaryEncoding ? index % UNIQUE_NUM * 2.0 : index * 2.0);
   }
 
   protected static boolean getBooleanValue(
-    float index) {
+      float index) {
     return (index % 2 == 0);
   }
 
-  protected static String getTimestampStr(int index) {
-    String s = String.valueOf(index);
-    int l = 4 - s.length();
-    for (int i = 0; i < l; i++) {
-      s = "0" + s;
-    }
-    return "99999999" + s;
+  protected static NanoTime getNanoTime(int index) {
+    return NanoTimeUtils.getNanoTime(new Timestamp(index), false);
   }
 
   protected static HiveDecimal getDecimal(
-    boolean isDictionaryEncoding,
-    int index) {
+      boolean isDictionaryEncoding,
+      int index) {
     int decimalVal = index % 100;
-    String decimalStr = (decimalVal < 10) ? "0" + String.valueOf(decimalVal) : String.valueOf
-      (decimalVal);
+    String decimalStr = (decimalVal < 10) ? "0" + String.valueOf(decimalVal) : String
+        .valueOf(decimalVal);
     int intVal = (isDictionaryEncoding) ? index % UNIQUE_NUM : index / 100;
-    String d = String.valueOf(intVal) + decimalStr;
-    BigInteger bi = new BigInteger(d);
-    BigDecimal bd = new BigDecimal(bi);
-    return HiveDecimal.create(bd);
+    String d = String.valueOf(intVal) + "." + decimalStr;
+    return HiveDecimal.create(d);
   }
 
   protected static Binary getTimestamp(
-    boolean isDictionaryEncoding,
-    int index) {
-    String s = isDictionaryEncoding ? getTimestampStr(index % UNIQUE_NUM) : getTimestampStr(index);
-    return Binary.fromReusedByteArray(s.getBytes());
+      boolean isDictionaryEncoding,
+      int index) {
+    NanoTime s = isDictionaryEncoding ? getNanoTime(index % UNIQUE_NUM) : getNanoTime(index);
+    return s.toBinary();
   }
 
   protected static String getStr(
-    boolean isDictionaryEncoding,
-    int index) {
+      boolean isDictionaryEncoding,
+      int index) {
     int binaryLen = isDictionaryEncoding ? index % UNIQUE_NUM : index;
     String v = "";
     while (binaryLen > 0) {
@@ -244,8 +244,8 @@ public class VectorizedColumnReaderTestBase {
   }
 
   protected static Binary getBinaryValue(
-    boolean isDictionaryEncoding,
-    int index) {
+      boolean isDictionaryEncoding,
+      int index) {
     return Binary.fromString(getStr(isDictionaryEncoding, index));
   }
 
@@ -254,20 +254,20 @@ public class VectorizedColumnReaderTestBase {
   }
 
  public static VectorizedParquetRecordReader createTestParquetReader(String schemaString, Configuration conf)
-    throws IOException, InterruptedException, HiveException {
+      throws IOException, InterruptedException, HiveException {
     conf.set(PARQUET_READ_SCHEMA, schemaString);
    HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true);
     HiveConf.setVar(conf, HiveConf.ConfVars.PLAN, "//tmp");
     Job vectorJob = new Job(conf, "read vector");
     ParquetInputFormat.setInputPaths(vectorJob, file);
     initialVectorizedRowBatchCtx(conf);
-    return new VectorizedParquetRecordReader(getFileSplit(vectorJob),new JobConf(conf));
+    return new VectorizedParquetRecordReader(getFileSplit(vectorJob), new JobConf(conf));
   }
 
  protected static FileSplit getFileSplit(Job vectorJob) throws IOException, InterruptedException {
    ParquetInputFormat parquetInputFormat = new ParquetInputFormat(GroupReadSupport.class);
    InputSplit split = (InputSplit) parquetInputFormat.getSplits(vectorJob).get(0);
-    FileSplit fsplit = new FileSplit(file,0L,split.getLength(),split.getLocations());
+    FileSplit fsplit = new FileSplit(file, 0L, split.getLength(), split.getLocations());
     return fsplit;
   }
 
@@ -284,13 +284,13 @@ public class VectorizedColumnReaderTestBase {
       boolean booleanVal = getBooleanValue(i);
       Binary binary = getBinaryValue(isDictionaryEncoding, i);
       Group group = f.newGroup()
-        .append("int32_field", intVal)
-        .append("int64_field", longVal)
-        .append("int96_field", timeStamp)
-        .append("double_field", doubleVal)
-        .append("float_field", floatVal)
-        .append("boolean_field", booleanVal)
-        .append("flba_field", "abc");
+          .append("int32_field", intVal)
+          .append("int64_field", longVal)
+          .append("int96_field", timeStamp)
+          .append("double_field", doubleVal)
+          .append("float_field", floatVal)
+          .append("boolean_field", booleanVal)
+          .append("flba_field", "abc");
 
       if (!isNull) {
         group.append("some_null_field", "x");
@@ -306,8 +306,8 @@ public class VectorizedColumnReaderTestBase {
       group.append("value", 
Binary.fromConstantByteArray(w.getInternalStorage()));
 
       group.addGroup("struct_field")
-        .append("a", intVal)
-        .append("b", doubleVal);
+          .append("a", intVal)
+          .append("b", doubleVal);
 
       Group g = group.addGroup("nested_struct_field");
 
@@ -358,14 +358,154 @@ public class VectorizedColumnReaderTestBase {
     return new ArrayWritableObjectInspector((StructTypeInfo) rowTypeInfo);
   }
 
-  protected void intRead(boolean isDictionaryEncoding) throws InterruptedException, HiveException, IOException {
-    Configuration conf = new Configuration();
-    conf.set(IOConstants.COLUMNS,"int32_field");
-    conf.set(IOConstants.COLUMNS_TYPES,"int");
+  protected void timestampRead(boolean isDictionaryEncoding) throws InterruptedException,
+      HiveException, IOException {
+    conf.set(IOConstants.COLUMNS, "int96_field");
+    conf.set(IOConstants.COLUMNS_TYPES, "timestamp");
+    conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    VectorizedParquetRecordReader reader = createTestParquetReader("message test { required " +
+        "int96 int96_field;}", conf);
+    VectorizedRowBatch previous = reader.createValue();
+    try {
+      int c = 0;
+      while (reader.next(NullWritable.get(), previous)) {
+        TimestampColumnVector vector = (TimestampColumnVector) previous.cols[0];
+        assertTrue(vector.noNulls);
+        for (int i = 0; i < vector.nanos.length; i++) {
+          if (c == nElements) {
+            break;
+          }
+          Timestamp expected = isDictionaryEncoding ? new Timestamp(c % UNIQUE_NUM) : new Timestamp(c);
+          assertEquals("Not the same time at " + c, expected.getTime(), vector.getTime(i));
+          assertEquals("Not the same nano at " + c, expected.getNanos(), vector.getNanos(i));
+          assertFalse(vector.isNull[i]);
+          c++;
+        }
+      }
+      assertEquals(nElements, c);
+    } finally {
+      reader.close();
+    }
+  }
+
+  protected void stringReadTimestamp(boolean isDictionaryEncoding) throws InterruptedException,
+      HiveException, IOException {
+    conf.set(IOConstants.COLUMNS, "int96_field");
+    conf.set(IOConstants.COLUMNS_TYPES, "string");
+    conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    VectorizedParquetRecordReader reader = createTestParquetReader("message test { required " +
+        "int96 int96_field;}", conf);
+    VectorizedRowBatch previous = reader.createValue();
+    try {
+      int c = 0;
+      while (reader.next(NullWritable.get(), previous)) {
+        BytesColumnVector vector = (BytesColumnVector) previous.cols[0];
+        assertTrue(vector.noNulls);
+        for (int i = 0; i < vector.vector.length; i++) {
+          if (c == nElements) {
+            break;
+          }
+
+          Timestamp expected = isDictionaryEncoding ? new Timestamp(c % UNIQUE_NUM) : new Timestamp(
+              c);
+          String actual = new String(Arrays
+              .copyOfRange(vector.vector[i], vector.start[i], vector.start[i] + vector.length[i]));
+          assertEquals("Not the same time at " + c, expected.toString(), actual);
+
+          assertFalse(vector.isNull[i]);
+          c++;
+        }
+      }
+      assertEquals(nElements, c);
+    } finally {
+      reader.close();
+    }
+  }
+
+  protected void floatReadInt(boolean isDictionaryEncoding) throws InterruptedException,
+      HiveException, IOException {
+    conf.set(IOConstants.COLUMNS, "int32_field");
+    conf.set(IOConstants.COLUMNS_TYPES, "float");
+    conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    VectorizedParquetRecordReader reader = createTestParquetReader("message test { required int32" +
+        " int32_field;}", conf);
+    VectorizedRowBatch previous = reader.createValue();
+    try {
+      int c = 0;
+      while (reader.next(NullWritable.get(), previous)) {
+        DoubleColumnVector vector = (DoubleColumnVector) previous.cols[0];
+        assertTrue(vector.noNulls);
+        for (int i = 0; i < vector.vector.length; i++) {
+          if (c == nElements) {
+            break;
+          }
+          assertEquals("Failed at " + c, getIntValue(isDictionaryEncoding, c), 
vector.vector[i], 0);
+          assertFalse(vector.isNull[i]);
+          c++;
+        }
+      }
+      assertEquals(nElements, c);
+    } finally {
+      reader.close();
+    }
+  }
+
+  protected void doubleReadInt(boolean isDictionaryEncoding) throws InterruptedException,
+      HiveException, IOException {
+    conf.set(IOConstants.COLUMNS, "int32_field");
+    conf.set(IOConstants.COLUMNS_TYPES, "double");
     conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
     conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
     VectorizedParquetRecordReader reader =
-      createTestParquetReader("message test { required int32 int32_field;}", conf);
+        createTestParquetReader("message test { required int32 int32_field;}", conf);
+    VectorizedRowBatch previous = reader.createValue();
+    try {
+      int c = 0;
+      while (reader.next(NullWritable.get(), previous)) {
+        DoubleColumnVector vector = (DoubleColumnVector) previous.cols[0];
+        assertTrue(vector.noNulls);
+        for (int i = 0; i < vector.vector.length; i++) {
+          if (c == nElements) {
+            break;
+          }
+          assertEquals("Failed at " + c, getIntValue(isDictionaryEncoding, c), 
vector.vector[i], 0);
+          assertFalse(vector.isNull[i]);
+          c++;
+        }
+      }
+      assertEquals(nElements, c);
+    } finally {
+      reader.close();
+    }
+  }
+
+  protected void longReadInt(boolean isDictionaryEncoding) throws InterruptedException,
+      HiveException, IOException {
+    Configuration c = new Configuration();
+    c.set(IOConstants.COLUMNS, "int32_field");
+    c.set(IOConstants.COLUMNS_TYPES, "bigint");
+    c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    intRead(isDictionaryEncoding, c);
+  }
+
+  protected void intRead(boolean isDictionaryEncoding) throws InterruptedException,
+      HiveException, IOException {
+    Configuration c = new Configuration();
+    c.set(IOConstants.COLUMNS, "int32_field");
+    c.set(IOConstants.COLUMNS_TYPES, "int");
+    c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    intRead(isDictionaryEncoding, c);
+  }
+
+  private void intRead(boolean isDictionaryEncoding, Configuration conf) throws
+      InterruptedException, HiveException, IOException {
+    VectorizedParquetRecordReader reader =
+        createTestParquetReader("message test { required int32 int32_field;}", conf);
     VectorizedRowBatch previous = reader.createValue();
     try {
       int c = 0;
@@ -373,7 +513,7 @@ public class VectorizedColumnReaderTestBase {
         LongColumnVector vector = (LongColumnVector) previous.cols[0];
         assertTrue(vector.noNulls);
         for (int i = 0; i < vector.vector.length; i++) {
-          if(c == nElements){
+          if (c == nElements) {
             break;
           }
           assertEquals("Failed at " + c, getIntValue(isDictionaryEncoding, c), 
vector.vector[i]);
@@ -387,14 +527,78 @@ public class VectorizedColumnReaderTestBase {
     }
   }
 
+  protected void floatReadLong(boolean isDictionaryEncoding) throws Exception {
+    Configuration c = new Configuration();
+    c.set(IOConstants.COLUMNS, "int64_field");
+    c.set(IOConstants.COLUMNS_TYPES, "float");
+    c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    VectorizedParquetRecordReader reader =
+        createTestParquetReader("message test { required int64 int64_field;}", c);
+    VectorizedRowBatch previous = reader.createValue();
+    try {
+      int count = 0;
+      while (reader.next(NullWritable.get(), previous)) {
+        DoubleColumnVector vector = (DoubleColumnVector) previous.cols[0];
+        assertTrue(vector.noNulls);
+        for (int i = 0; i < vector.vector.length; i++) {
+          if (count == nElements) {
+            break;
+          }
+          assertEquals("Failed at " + count, 
getLongValue(isDictionaryEncoding, count), vector
+              .vector[i], 0);
+          assertFalse(vector.isNull[i]);
+          count++;
+        }
+      }
+      assertEquals(nElements, count);
+    } finally {
+      reader.close();
+    }
+  }
+
+  protected void doubleReadLong(boolean isDictionaryEncoding) throws Exception {
+    Configuration c = new Configuration();
+    c.set(IOConstants.COLUMNS, "int64_field");
+    c.set(IOConstants.COLUMNS_TYPES, "double");
+    c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    VectorizedParquetRecordReader reader =
+        createTestParquetReader("message test { required int64 int64_field;}", c);
+    VectorizedRowBatch previous = reader.createValue();
+    try {
+      int count = 0;
+      while (reader.next(NullWritable.get(), previous)) {
+        DoubleColumnVector vector = (DoubleColumnVector) previous.cols[0];
+        assertTrue(vector.noNulls);
+        for (int i = 0; i < vector.vector.length; i++) {
+          if (count == nElements) {
+            break;
+          }
+          assertEquals("Failed at " + count, 
getLongValue(isDictionaryEncoding, count),
+              vector.vector[i], 0);
+          assertFalse(vector.isNull[i]);
+          count++;
+        }
+      }
+      assertEquals(nElements, count);
+    } finally {
+      reader.close();
+    }
+  }
+
   protected void longRead(boolean isDictionaryEncoding) throws Exception {
-    Configuration conf = new Configuration();
-    conf.set(IOConstants.COLUMNS, "int64_field");
-    conf.set(IOConstants.COLUMNS_TYPES, "bigint");
-    conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
-    conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    Configuration c = new Configuration();
+    c.set(IOConstants.COLUMNS, "int64_field");
+    c.set(IOConstants.COLUMNS_TYPES, "bigint");
+    c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    longRead(isDictionaryEncoding, c);
+  }
+
+  private void longRead(boolean isDictionaryEncoding, Configuration conf) throws Exception {
     VectorizedParquetRecordReader reader =
-      createTestParquetReader("message test { required int64 int64_field;}", conf);
+        createTestParquetReader("message test { required int64 int64_field;}", conf);
     VectorizedRowBatch previous = reader.createValue();
     try {
       int c = 0;
@@ -417,13 +621,49 @@ public class VectorizedColumnReaderTestBase {
   }
 
   protected void doubleRead(boolean isDictionaryEncoding) throws Exception {
-    Configuration conf = new Configuration();
-    conf.set(IOConstants.COLUMNS, "double_field");
-    conf.set(IOConstants.COLUMNS_TYPES, "double");
-    conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
-    conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    Configuration c = new Configuration();
+    c.set(IOConstants.COLUMNS, "double_field");
+    c.set(IOConstants.COLUMNS_TYPES, "double");
+    c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    doubleRead(isDictionaryEncoding, c);
+  }
+
+  protected void stringReadDouble(boolean isDictionaryEncoding) throws Exception {
+    Configuration readerConf = new Configuration();
+    readerConf.set(IOConstants.COLUMNS, "double_field");
+    readerConf.set(IOConstants.COLUMNS_TYPES, "string");
+    readerConf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    readerConf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    VectorizedParquetRecordReader reader =
+        createTestParquetReader("message test { required double double_field;}", readerConf);
+    VectorizedRowBatch previous = reader.createValue();
+    try {
+      int c = 0;
+      while (reader.next(NullWritable.get(), previous)) {
+        BytesColumnVector vector = (BytesColumnVector) previous.cols[0];
+        assertTrue(vector.noNulls);
+        for (int i = 0; i < vector.vector.length; i++) {
+          if (c == nElements) {
+            break;
+          }
+          String actual = new String(Arrays.copyOfRange(vector.vector[i], vector.start[i], vector
+              .start[i] + vector.length[i]));
+          assertEquals("Failed at " + c, String.valueOf(getDoubleValue(isDictionaryEncoding, c)),
+              actual);
+          assertFalse(vector.isNull[i]);
+          c++;
+        }
+      }
+      assertEquals(nElements, c);
+    } finally {
+      reader.close();
+    }
+  }
+
+  private void doubleRead(boolean isDictionaryEncoding, Configuration conf) throws Exception {
     VectorizedParquetRecordReader reader =
-      createTestParquetReader("message test { required double double_field;}", conf);
+        createTestParquetReader("message test { required double double_field;}", conf);
     VectorizedRowBatch previous = reader.createValue();
     try {
       int c = 0;
@@ -435,7 +675,7 @@ public class VectorizedColumnReaderTestBase {
             break;
           }
           assertEquals("Failed at " + c, getDoubleValue(isDictionaryEncoding, 
c), vector.vector[i],
-            0);
+              0);
           assertFalse(vector.isNull[i]);
           c++;
         }
@@ -447,13 +687,26 @@ public class VectorizedColumnReaderTestBase {
   }
 
   protected void floatRead(boolean isDictionaryEncoding) throws Exception {
-    Configuration conf = new Configuration();
-    conf.set(IOConstants.COLUMNS, "float_field");
-    conf.set(IOConstants.COLUMNS_TYPES, "float");
-    conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
-    conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    Configuration c = new Configuration();
+    c.set(IOConstants.COLUMNS, "float_field");
+    c.set(IOConstants.COLUMNS_TYPES, "float");
+    c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    floatRead(isDictionaryEncoding, c);
+  }
+
+  protected void doubleReadFloat(boolean isDictionaryEncoding) throws Exception {
+    Configuration c = new Configuration();
+    c.set(IOConstants.COLUMNS, "float_field");
+    c.set(IOConstants.COLUMNS_TYPES, "double");
+    c.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    c.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    floatRead(isDictionaryEncoding, c);
+  }
+
+  private void floatRead(boolean isDictionaryEncoding, Configuration conf) throws Exception {
     VectorizedParquetRecordReader reader =
-      createTestParquetReader("message test { required float float_field;}", conf);
+        createTestParquetReader("message test { required float float_field;}", conf);
     VectorizedRowBatch previous = reader.createValue();
     try {
       int c = 0;
@@ -465,7 +718,7 @@ public class VectorizedColumnReaderTestBase {
             break;
           }
           assertEquals("Failed at " + c, getFloatValue(isDictionaryEncoding, 
c), vector.vector[i],
-            0);
+              0);
           assertFalse(vector.isNull[i]);
           c++;
         }
@@ -483,7 +736,7 @@ public class VectorizedColumnReaderTestBase {
     conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
     conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
     VectorizedParquetRecordReader reader =
-      createTestParquetReader("message test { required boolean boolean_field;}", conf);
+        createTestParquetReader("message test { required boolean boolean_field;}", conf);
     VectorizedRowBatch previous = reader.createValue();
     try {
       int c = 0;
@@ -505,6 +758,38 @@ public class VectorizedColumnReaderTestBase {
     }
   }
 
+  protected void stringReadBoolean() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set(IOConstants.COLUMNS, "boolean_field");
+    conf.set(IOConstants.COLUMNS_TYPES, "string");
+    conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    VectorizedParquetRecordReader reader =
+        createTestParquetReader("message test { required boolean boolean_field;}", conf);
+    VectorizedRowBatch previous = reader.createValue();
+    try {
+      int c = 0;
+      while (reader.next(NullWritable.get(), previous)) {
+        BytesColumnVector vector = (BytesColumnVector) previous.cols[0];
+        assertTrue(vector.noNulls);
+        for (int i = 0; i < vector.vector.length; i++) {
+          if (c == nElements) {
+            break;
+          }
+
+          String actual = new String(Arrays.copyOfRange(vector.vector[i], vector.start[i], vector
+              .start[i] + vector.length[i]));
+          assertEquals("Failed at " + c, String.valueOf(getBooleanValue(c)), actual);
+          assertFalse(vector.isNull[i]);
+          c++;
+        }
+      }
+      assertEquals(nElements, c);
+    } finally {
+      reader.close();
+    }
+  }
+
   protected void binaryRead(boolean isDictionaryEncoding) throws Exception {
     Configuration conf = new Configuration();
     conf.set(IOConstants.COLUMNS, "binary_field_some_null");
@@ -512,7 +797,7 @@ public class VectorizedColumnReaderTestBase {
     conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
     conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
     VectorizedParquetRecordReader reader =
-      createTestParquetReader("message test { required binary binary_field_some_null;}", conf);
+        createTestParquetReader("message test { required binary binary_field_some_null;}", conf);
     VectorizedRowBatch previous = reader.createValue();
     int c = 0;
     try {
@@ -527,7 +812,7 @@ public class VectorizedColumnReaderTestBase {
           assertEquals("Null assert failed at " + c, isNull(c), 
vector.isNull[i]);
           if (!vector.isNull[i]) {
             actual = new String(ArrayUtils
-              .subarray(vector.vector[i], vector.start[i], vector.start[i] + 
vector.length[i]));
+                .subarray(vector.vector[i], vector.start[i], vector.start[i] + 
vector.length[i]));
             assertEquals("failed at " + c, getStr(isDictionaryEncoding, c), 
actual);
           } else {
             noNull = false;
@@ -550,11 +835,11 @@ public class VectorizedColumnReaderTestBase {
     conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
     conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
     String schema = "message hive_schema {\n"
-      + "group struct_field {\n"
-      + "  optional int32 a;\n"
-      + "  optional double b;\n"
-      + "}\n"
-      + "}\n";
+        + "group struct_field {\n"
+        + "  optional int32 a;\n"
+        + "  optional double b;\n"
+        + "}\n"
+        + "}\n";
    VectorizedParquetRecordReader reader = createTestParquetReader(schema, conf);
     VectorizedRowBatch previous = reader.createValue();
     int c = 0;
@@ -588,13 +873,13 @@ public class VectorizedColumnReaderTestBase {
     conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
     conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
     String schema = "message hive_schema {\n"
-      + "group nested_struct_field {\n"
-      + "  optional group nsf {\n"
-      + "    optional int32 c;\n"
-      + "    optional int32 d;\n"
-      + "  }"
-      + "optional double e;\n"
-      + "}\n";
+        + "group nested_struct_field {\n"
+        + "  optional group nsf {\n"
+        + "    optional int32 c;\n"
+        + "    optional int32 d;\n"
+        + "  }"
+        + "optional double e;\n"
+        + "}\n";
    VectorizedParquetRecordReader reader = createTestParquetReader(schema, conf);
     VectorizedRowBatch previous = reader.createValue();
     int c = 0;
@@ -631,11 +916,11 @@ public class VectorizedColumnReaderTestBase {
     conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
     conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
     String schema = "message hive_schema {\n"
-      + "group nested_struct_field {\n"
-      + "  optional group nsf {\n"
-      + "    optional int32 c;\n"
-      + "  }"
-      + "}\n";
+        + "group nested_struct_field {\n"
+        + "  optional group nsf {\n"
+        + "    optional int32 c;\n"
+        + "  }"
+        + "}\n";
    VectorizedParquetRecordReader reader = createTestParquetReader(schema, conf);
     VectorizedRowBatch previous = reader.createValue();
     int c = 0;
@@ -668,10 +953,10 @@ public class VectorizedColumnReaderTestBase {
     conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
     conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
     String schema = "message hive_schema {\n"
-      + "group struct_field_some_null {\n"
-      + "  optional int32 f;\n"
-      + "  optional double g;\n"
-      + "}\n";
+        + "group struct_field_some_null {\n"
+        + "  optional int32 f;\n"
+        + "  optional double g;\n"
+        + "}\n";
    VectorizedParquetRecordReader reader = createTestParquetReader(schema, conf);
     VectorizedRowBatch previous = reader.createValue();
     int c = 0;
@@ -706,14 +991,48 @@ public class VectorizedColumnReaderTestBase {
     }
   }
 
-  protected void decimalRead(boolean isDictionaryEncoding) throws Exception {
+  protected void stringReadDecimal(boolean isDictionaryEncoding) throws Exception {
     Configuration conf = new Configuration();
     conf.set(IOConstants.COLUMNS, "value");
-    conf.set(IOConstants.COLUMNS_TYPES, "decimal(5,2)");
+    conf.set(IOConstants.COLUMNS_TYPES, "string");
     conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
     conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
     VectorizedParquetRecordReader reader =
-      createTestParquetReader("message hive_schema { required value (DECIMAL(5,2));}", conf);
+        createTestParquetReader("message hive_schema { required value (DECIMAL(5,2));}", conf);
+    VectorizedRowBatch previous = reader.createValue();
+    try {
+      int c = 0;
+      while (reader.next(NullWritable.get(), previous)) {
+        BytesColumnVector vector = (BytesColumnVector) previous.cols[0];
+        assertTrue(vector.noNulls);
+        for (int i = 0; i < vector.vector.length; i++) {
+          if (c == nElements) {
+            break;
+          }
+
+          String actual = new String(Arrays.copyOfRange(vector.vector[i], vector.start[i], vector
+              .start[i] + vector.length[i]));
+          assertEquals("Check failed at pos " + c, getDecimal(isDictionaryEncoding, c).toString(),
+              actual);
+
+          assertFalse(vector.isNull[i]);
+          c++;
+        }
+      }
+      assertEquals(nElements, c);
+    } finally {
+      reader.close();
+    }
+  }
+
+  protected void decimalRead(boolean isDictionaryEncoding) throws Exception {
+    Configuration readerConf = new Configuration();
+    readerConf.set(IOConstants.COLUMNS, "value");
+    readerConf.set(IOConstants.COLUMNS_TYPES, "decimal(5,2)");
+    readerConf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
+    readerConf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
+    VectorizedParquetRecordReader reader =
+        createTestParquetReader("message hive_schema { required value (DECIMAL(5,2));}", readerConf);
     VectorizedRowBatch previous = reader.createValue();
     try {
       int c = 0;
@@ -725,7 +1044,8 @@ public class VectorizedColumnReaderTestBase {
             break;
           }
           assertEquals("Check failed at pos " + c, 
getDecimal(isDictionaryEncoding, c),
-            vector.vector[i].getHiveDecimal());
+              vector.vector[i].getHiveDecimal());
+
           assertFalse(vector.isNull[i]);
           c++;
         }

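A note on the getDecimal() change above: the old helper concatenated the integer and fraction digits and built the HiveDecimal from that unscaled integer, while the new helper inserts an explicit decimal point, so the expected value now matches the logical DECIMAL(5,2) value that the reader produces. A minimal standalone sketch of the difference (not part of the patch; it assumes only org.apache.hadoop.hive.common.type.HiveDecimal on the classpath):

    import java.math.BigDecimal;
    import java.math.BigInteger;

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class GetDecimalSketch {
      public static void main(String[] args) {
        int intVal = 10;
        String decimalStr = "05";

        // Old construction: digit concatenation yields the unscaled value 1005.
        HiveDecimal oldVal =
            HiveDecimal.create(new BigDecimal(new BigInteger(intVal + decimalStr)));

        // New construction: an explicit decimal point yields 10.05.
        HiveDecimal newVal = HiveDecimal.create(intVal + "." + decimalStr);

        System.out.println(oldVal); // 1005
        System.out.println(newVal); // 10.05
      }
    }
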
http://git-wip-us.apache.org/repos/asf/hive/blob/7ddac02b/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q b/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q
new file mode 100644
index 0000000..6b706ab
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q
@@ -0,0 +1,94 @@
+set hive.fetch.task.conversion=none;
+set hive.vectorized.execution.enabled=true;
+set parquet.enable.dictionary=true;
+
+drop table test_alter;
+drop table test_alter2;
+drop table test_alter3;
+
+create table test_alter (id string) stored as parquet;
+insert into test_alter values ('1'), ('2'), ('3');
+select * from test_alter;
+
+-- add new column -> empty col values should return NULL
+alter table test_alter add columns (newCol string);
+select * from test_alter;
+
+-- insert data into new column -> New data should be returned
+insert into test_alter values ('4', '100');
+select * from test_alter;
+
+-- remove the newly added column
+-- this works in vectorized execution
+alter table test_alter replace columns (id string);
+select * from test_alter;
+
+-- add column using replace column syntax
+alter table test_alter replace columns (id string, id2 string);
+-- this surprisingly doesn't return the 100 added to the 4th row above
+select * from test_alter;
+insert into test_alter values ('5', '100');
+select * from test_alter;
+
+-- use the same column name and datatype
+alter table test_alter replace columns (id string, id2 string);
+select * from test_alter;
+
+-- change string to char
+alter table test_alter replace columns (id char(10), id2 string);
+select * from test_alter;
+
+-- change string to varchar
+alter table test_alter replace columns (id string, id2 string);
+alter table test_alter replace columns (id varchar(10), id2 string);
+select * from test_alter;
+
+-- change column type and column name
+alter table test_alter replace columns (id string, id2 string);
+alter table test_alter replace columns (idv varchar(10), id2 string);
+select * from test_alter;
+
+-- test int to long type conversion
+create table test_alter2 (id int) stored as parquet;
+insert into test_alter2 values (1);
+alter table test_alter2 replace columns (id bigint);
+select * from test_alter2;
+
+-- test float to double type conversion
+drop table test_alter2;
+create table test_alter2 (id float) stored as parquet;
+insert into test_alter2 values (1.5);
+alter table test_alter2 replace columns (id double);
+select * from test_alter2;
+
+drop table test_alter2;
+create table test_alter2 (ts timestamp) stored as parquet;
+insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456');
+select * from test_alter2;
+alter table test_alter2 replace columns (ts string);
+select * from test_alter2;
+
+drop table test_alter2;
+create table test_alter2 (ts timestamp) stored as parquet;
+insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456');
+select * from test_alter2;
+alter table test_alter2 replace columns (ts varchar(19));
+-- this should truncate the microseconds
+select * from test_alter2;
+
+drop table test_alter2;
+create table test_alter2 (ts timestamp) stored as parquet;
+insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456');
+select * from test_alter2;
+alter table test_alter2 replace columns (ts char(25));
+select * from test_alter2;
+
+-- test integer types upconversion
+create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet;
+insert into test_alter3 values (10, 20, 30, 40);
+alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4));
+-- this fails mostly due to bigint to decimal
+-- select * from test_alter3;
+select id1, id2, id3 from test_alter3;
+
+
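For reference (not part of the patch): the varchar(19) case above truncates because "yyyy-MM-dd HH:mm:ss" is exactly 19 characters, so reading the stored timestamp back as varchar(19) keeps only that prefix and drops the fractional seconds. A tiny illustrative sketch of the same arithmetic in plain Java:

    public class VarcharTruncationSketch {
      public static void main(String[] args) {
        String full = "2018-01-01 13:14:15.123456"; // 26 characters
        String asVarchar19 = full.substring(0, 19); // what a varchar(19) keeps
        System.out.println(asVarchar19);            // prints 2018-01-01 13:14:15
      }
    }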

http://git-wip-us.apache.org/repos/asf/hive/blob/7ddac02b/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_non_dictionary_encoding.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_non_dictionary_encoding.q b/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_non_dictionary_encoding.q
new file mode 100644
index 0000000..3006bd4
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/schema_evol_par_vec_table_non_dictionary_encoding.q
@@ -0,0 +1,94 @@
+set hive.fetch.task.conversion=none;
+set hive.vectorized.execution.enabled=true;
+set parquet.enable.dictionary=false;
+
+drop table test_alter;
+drop table test_alter2;
+drop table test_alter3;
+
+create table test_alter (id string) stored as parquet;
+insert into test_alter values ('1'), ('2'), ('3');
+select * from test_alter;
+
+-- add new column -> empty col values should return NULL
+alter table test_alter add columns (newCol string);
+select * from test_alter;
+
+-- insert data into new column -> New data should be returned
+insert into test_alter values ('4', '100');
+select * from test_alter;
+
+-- remove the newly added column
+-- this works in vectorized execution
+alter table test_alter replace columns (id string);
+select * from test_alter;
+
+-- add column using replace column syntax
+alter table test_alter replace columns (id string, id2 string);
+-- this surprisingly doesn't return the 100 added to the 4th row above
+select * from test_alter;
+insert into test_alter values ('5', '100');
+select * from test_alter;
+
+-- use the same column name and datatype
+alter table test_alter replace columns (id string, id2 string);
+select * from test_alter;
+
+-- change string to char
+alter table test_alter replace columns (id char(10), id2 string);
+select * from test_alter;
+
+-- change string to varchar
+alter table test_alter replace columns (id string, id2 string);
+alter table test_alter replace columns (id varchar(10), id2 string);
+select * from test_alter;
+
+-- change column type and column name
+alter table test_alter replace columns (id string, id2 string);
+alter table test_alter replace columns (idv varchar(10), id2 string);
+select * from test_alter;
+
+-- test int to long type conversion
+create table test_alter2 (id int) stored as parquet;
+insert into test_alter2 values (1);
+alter table test_alter2 replace columns (id bigint);
+select * from test_alter2;
+
+-- test float to double type conversion
+drop table test_alter2;
+create table test_alter2 (id float) stored as parquet;
+insert into test_alter2 values (1.5);
+alter table test_alter2 replace columns (id double);
+select * from test_alter2;
+
+drop table test_alter2;
+create table test_alter2 (ts timestamp) stored as parquet;
+insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456');
+select * from test_alter2;
+alter table test_alter2 replace columns (ts string);
+select * from test_alter2;
+
+drop table test_alter2;
+create table test_alter2 (ts timestamp) stored as parquet;
+insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456');
+select * from test_alter2;
+alter table test_alter2 replace columns (ts varchar(19));
+-- this should truncate the microseconds
+select * from test_alter2;
+
+drop table test_alter2;
+create table test_alter2 (ts timestamp) stored as parquet;
+insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456');
+select * from test_alter2;
+alter table test_alter2 replace columns (ts char(25));
+select * from test_alter2;
+
+-- test integer types upconversion
+create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet;
+insert into test_alter3 values (10, 20, 30, 40);
+alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4));
+-- this fails mostly due to bigint to decimal
+-- select * from test_alter3;
+select id1, id2, id3 from test_alter3;
+
+
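The two .q files above are identical except for parquet.enable.dictionary, so the same schema-evolution conversions are exercised against both the dictionary-encoded and the plain Parquet decoding paths. The test base uses the same idea on the Java side: its value generators wrap indices modulo UNIQUE_NUM so that values repeat and the writer can actually dictionary-encode them. An illustrative sketch (UNIQUE_NUM's real value lives in the test base; 10 here is only for the example):

    public class DictionaryValueSketch {
      static final int UNIQUE_NUM = 10; // illustrative; see VectorizedColumnReaderTestBase

      // Mirrors getIntValue() from the test base above.
      static int getIntValue(boolean isDictionaryEncoding, int index) {
        return isDictionaryEncoding ? index % UNIQUE_NUM : index;
      }

      public static void main(String[] args) {
        System.out.println(getIntValue(true, 27));  // 7  -> values repeat, dictionary-friendly
        System.out.println(getIntValue(false, 27)); // 27 -> all values distinct, plain encoding
      }
    }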

http://git-wip-us.apache.org/repos/asf/hive/blob/7ddac02b/ql/src/test/results/clientpositive/schema_evol_par_vec_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/schema_evol_par_vec_table.q.out b/ql/src/test/results/clientpositive/schema_evol_par_vec_table.q.out
new file mode 100644
index 0000000..a6128b6
--- /dev/null
+++ b/ql/src/test/results/clientpositive/schema_evol_par_vec_table.q.out
@@ -0,0 +1,357 @@
+PREHOOK: query: drop table test_alter
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table test_alter
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table test_alter3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table test_alter3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_alter (id string) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: create table test_alter (id string) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: insert into test_alter values ('1'), ('2'), ('3')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: insert into test_alter values ('1'), ('2'), ('3')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter
+POSTHOOK: Lineage: test_alter.id SCRIPT []
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1
+2
+3
+PREHOOK: query: alter table test_alter add columns (newCol string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter add columns (newCol string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1      NULL
+2      NULL
+3      NULL
+PREHOOK: query: insert into test_alter values ('4', '100')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: insert into test_alter values ('4', '100')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter
+POSTHOOK: Lineage: test_alter.id SCRIPT []
+POSTHOOK: Lineage: test_alter.newcol SCRIPT []
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1      NULL
+2      NULL
+3      NULL
+4      100
+PREHOOK: query: alter table test_alter replace columns (id string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1
+2
+3
+4
+PREHOOK: query: alter table test_alter replace columns (id string, id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string, id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1      NULL
+2      NULL
+3      NULL
+4      NULL
+PREHOOK: query: insert into test_alter values ('5', '100')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: insert into test_alter values ('5', '100')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter
+POSTHOOK: Lineage: test_alter.id SCRIPT []
+POSTHOOK: Lineage: test_alter.id2 SCRIPT []
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+5      100
+1      NULL
+2      NULL
+3      NULL
+4      NULL
+PREHOOK: query: alter table test_alter replace columns (id string, id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string, id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+5      100
+1      NULL
+2      NULL
+3      NULL
+4      NULL
+PREHOOK: query: alter table test_alter replace columns (id char(10), id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id char(10), id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+5              100
+1              NULL
+2              NULL
+3              NULL
+4              NULL
+PREHOOK: query: alter table test_alter replace columns (id string, id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string, id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: alter table test_alter replace columns (id varchar(10), id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id varchar(10), id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+5      100
+1      NULL
+2      NULL
+3      NULL
+4      NULL
+PREHOOK: query: alter table test_alter replace columns (id string, id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string, id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: alter table test_alter replace columns (idv varchar(10), id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (idv varchar(10), id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+NULL   100
+NULL   NULL
+NULL   NULL
+NULL   NULL
+NULL   NULL
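Unlike the char/varchar retypes, this step also renamed the column (id to idv), and every pre-existing value now reads back NULL: replace columns rewrites only table metadata, and the reader finds no column named idv in the already-written Parquet files. A minimal sketch of the pitfall, assuming a fresh table named t:

-- Sketch of the rename pitfall observed above (fresh table assumed).
create table t (id string) stored as parquet;
insert into t values ('1');
alter table t replace columns (idv string); -- metadata-only rename
select * from t;                            -- idv reads back as NULL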
+PREHOOK: query: create table test_alter2 (id int) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (id int) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values (1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values (1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.id SCRIPT []
+PREHOOK: query: alter table test_alter2 replace columns (id bigint)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (id bigint)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+1
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: create table test_alter2 (id float) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (id float) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values (1.5)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values (1.5)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.id SCRIPT []
+PREHOOK: query: alter table test_alter2 replace columns (id double)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (id double)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+1.5
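Both conversions in this block are read-time widenings: the ALTERs touch only metadata, and the stored int and float values are returned as bigint and double without any files being rewritten. Condensed from the statements above:

-- Recap of the widening steps verified above.
create table test_alter2 (id int) stored as parquet;
insert into test_alter2 values (1);
alter table test_alter2 replace columns (id bigint); -- int -> bigint
select * from test_alter2;                           -- 1
-- and analogously float -> double, still reading back 1.5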
+PREHOOK: query: create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter3
+POSTHOOK: query: create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter3
+PREHOOK: query: insert into test_alter3 values (10, 20, 30, 40)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter3
+POSTHOOK: query: insert into test_alter3 values (10, 20, 30, 40)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter3
+POSTHOOK: Lineage: test_alter3.id1 SCRIPT []
+POSTHOOK: Lineage: test_alter3.id2 SCRIPT []
+POSTHOOK: Lineage: test_alter3.id3 SCRIPT []
+POSTHOOK: Lineage: test_alter3.id4 SCRIPT []
+PREHOOK: query: alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4))
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter3
+PREHOOK: Output: default@test_alter3
+POSTHOOK: query: alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4))
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter3
+POSTHOOK: Output: default@test_alter3
+PREHOOK: query: select id1, id2, id3 from test_alter3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter3
+#### A masked pattern was here ####
+POSTHOOK: query: select id1, id2, id3 from test_alter3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter3
+#### A masked pattern was here ####
+10     20      30
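Condensed, the promotion case this golden file closes with moves each integer column one step up the widening chain and additionally retypes bigint to decimal, with the promoted columns still returning their original values:

-- Recap of the integer-promotion sequence verified above.
create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint)
  stored as parquet;
insert into test_alter3 values (10, 20, 30, 40);
alter table test_alter3
  replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4));
select id1, id2, id3 from test_alter3; -- 10  20  30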

http://git-wip-us.apache.org/repos/asf/hive/blob/7ddac02b/ql/src/test/results/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q.out b/ql/src/test/results/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q.out
new file mode 100644
index 0000000..1d2f36d
--- /dev/null
+++ b/ql/src/test/results/clientpositive/schema_evol_par_vec_table_dictionary_encoding.q.out
@@ -0,0 +1,522 @@
+PREHOOK: query: drop table test_alter
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table test_alter
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table test_alter3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table test_alter3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_alter (id string) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: create table test_alter (id string) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: insert into test_alter values ('1'), ('2'), ('3')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: insert into test_alter values ('1'), ('2'), ('3')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter
+POSTHOOK: Lineage: test_alter.id SCRIPT []
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1
+2
+3
+PREHOOK: query: alter table test_alter add columns (newCol string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter add columns (newCol string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1      NULL
+2      NULL
+3      NULL
+PREHOOK: query: insert into test_alter values ('4', '100')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: insert into test_alter values ('4', '100')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter
+POSTHOOK: Lineage: test_alter.id SCRIPT []
+POSTHOOK: Lineage: test_alter.newcol SCRIPT []
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1      NULL
+2      NULL
+3      NULL
+4      100
+PREHOOK: query: alter table test_alter replace columns (id string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1
+2
+3
+4
+PREHOOK: query: alter table test_alter replace columns (id string, id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string, id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+1      NULL
+2      NULL
+3      NULL
+4      NULL
+PREHOOK: query: insert into test_alter values ('5', '100')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: insert into test_alter values ('5', '100')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter
+POSTHOOK: Lineage: test_alter.id SCRIPT []
+POSTHOOK: Lineage: test_alter.id2 SCRIPT []
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+5      100
+1      NULL
+2      NULL
+3      NULL
+4      NULL
+PREHOOK: query: alter table test_alter replace columns (id string, id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string, id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+5      100
+1      NULL
+2      NULL
+3      NULL
+4      NULL
+PREHOOK: query: alter table test_alter replace columns (id char(10), id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id char(10), id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+5              100
+1              NULL
+2              NULL
+3              NULL
+4              NULL
+PREHOOK: query: alter table test_alter replace columns (id string, id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string, id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: alter table test_alter replace columns (id varchar(10), id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id varchar(10), id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+5      100
+1      NULL
+2      NULL
+3      NULL
+4      NULL
+PREHOOK: query: alter table test_alter replace columns (id string, id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (id string, id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: alter table test_alter replace columns (idv varchar(10), id2 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter
+PREHOOK: Output: default@test_alter
+POSTHOOK: query: alter table test_alter replace columns (idv varchar(10), id2 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter
+POSTHOOK: Output: default@test_alter
+PREHOOK: query: select * from test_alter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter
+#### A masked pattern was here ####
+NULL   100
+NULL   NULL
+NULL   NULL
+NULL   NULL
+NULL   NULL
+PREHOOK: query: create table test_alter2 (id int) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (id int) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values (1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values (1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.id SCRIPT []
+PREHOOK: query: alter table test_alter2 replace columns (id bigint)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (id bigint)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+1
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: create table test_alter2 (id float) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (id float) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values (1.5)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values (1.5)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.id SCRIPT []
+PREHOOK: query: alter table test_alter2 replace columns (id double)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (id double)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+1.5
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.ts SCRIPT []
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15.123456
+2018-01-02 14:15:16.123456
+2018-01-03 16:17:18.123456
+PREHOOK: query: alter table test_alter2 replace columns (ts string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (ts string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15.123456
+2018-01-02 14:15:16.123456
+2018-01-03 16:17:18.123456
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.ts SCRIPT []
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15.123456
+2018-01-02 14:15:16.123456
+2018-01-03 16:17:18.123456
+PREHOOK: query: alter table test_alter2 replace columns (ts varchar(19))
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (ts varchar(19))
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15
+2018-01-02 14:15:16
+2018-01-03 16:17:18
+PREHOOK: query: drop table test_alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: drop table test_alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: create table test_alter2 (ts timestamp) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: insert into test_alter2 values ('2018-01-01 13:14:15.123456'), ('2018-01-02 14:15:16.123456'), ('2018-01-03 16:17:18.123456')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter2
+POSTHOOK: Lineage: test_alter2.ts SCRIPT []
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15.123456
+2018-01-02 14:15:16.123456
+2018-01-03 16:17:18.123456
+PREHOOK: query: alter table test_alter2 replace columns (ts char(25))
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter2
+PREHOOK: Output: default@test_alter2
+POSTHOOK: query: alter table test_alter2 replace columns (ts char(25))
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter2
+POSTHOOK: Output: default@test_alter2
+PREHOOK: query: select * from test_alter2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from test_alter2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter2
+#### A masked pattern was here ####
+2018-01-01 13:14:15.12345
+2018-01-02 14:15:16.12345
+2018-01-03 16:17:18.12345
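Relative to the run above, the addition in this file is the timestamp section, and the conversions follow plain length semantics: the rendered form '2018-01-01 13:14:15.123456' is 26 characters, so timestamp to string keeps it intact, varchar(19) keeps only the seconds, and char(25) drops the final microsecond digit. Condensed from the statements above:

-- Recap: bounded string types truncate the rendered timestamp text.
create table test_alter2 (ts timestamp) stored as parquet;
insert into test_alter2 values ('2018-01-01 13:14:15.123456');
alter table test_alter2 replace columns (ts varchar(19));
select * from test_alter2; -- 2018-01-01 13:14:15 (first 19 characters)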
+PREHOOK: query: create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_alter3
+POSTHOOK: query: create table test_alter3 (id1 tinyint, id2 smallint, id3 int, id4 bigint) stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_alter3
+PREHOOK: query: insert into test_alter3 values (10, 20, 30, 40)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_alter3
+POSTHOOK: query: insert into test_alter3 values (10, 20, 30, 40)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_alter3
+POSTHOOK: Lineage: test_alter3.id1 SCRIPT []
+POSTHOOK: Lineage: test_alter3.id2 SCRIPT []
+POSTHOOK: Lineage: test_alter3.id3 SCRIPT []
+POSTHOOK: Lineage: test_alter3.id4 SCRIPT []
+PREHOOK: query: alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4))
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@test_alter3
+PREHOOK: Output: default@test_alter3
+POSTHOOK: query: alter table test_alter3 replace columns (id1 smallint, id2 int, id3 bigint, id4 decimal(10,4))
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@test_alter3
+POSTHOOK: Output: default@test_alter3
+PREHOOK: query: select id1, id2, id3 from test_alter3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_alter3
+#### A masked pattern was here ####
+POSTHOOK: query: select id1, id2, id3 from test_alter3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_alter3
+#### A masked pattern was here ####
+10     20      30
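The statements in this file mirror the earlier run; going by the file name, the corresponding .q script presumably pins Parquet dictionary encoding for the written data, along the lines of the following (an assumption; the .q source is not part of this diff):

-- Hypothetical setup inferred from the file name only.
set parquet.enable.dictionary=true;
-- followed by the same create/insert/alter/select sequence as above.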
