http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_fetchwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_fetchwork_table.q.out b/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_fetchwork_table.q.out
new file mode 100644
index 0000000..4003c20
--- /dev/null
+++ b/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_fetchwork_table.q.out
@@ -0,0 +1,297 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: TEXT, Non-Vectorized, FetchWork, Table
+--
+--
+-- SECTION VARIATION: ALTER TABLE ADD COLUMNS
+---
+CREATE TABLE table1(a INT, b STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: TEXT, Non-Vectorized, FetchWork, Table
+--
+--
+-- SECTION VARIATION: ALTER TABLE ADD COLUMNS
+---
+CREATE TABLE table1(a INT, b STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table1
+PREHOOK: query: insert into table table1 values(1, 'original'),(2, 'original'), (3, 'original'),(4, 'original')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@table1
+POSTHOOK: query: insert into table table1 values(1, 'original'),(2, 'original'), (3, 'original'),(4, 'original')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table1.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: table1.b SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1
+PREHOOK: query: select a,b from table1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table1
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b from table1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table1
+#### A masked pattern was here ####
+a      b
+1      original
+2      original
+3      original
+4      original
+PREHOOK: query: -- ADD COLUMNS
+alter table table1 add columns(c int, d string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@table1
+PREHOOK: Output: default@table1
+POSTHOOK: query: -- ADD COLUMNS
+alter table table1 add columns(c int, d string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@table1
+POSTHOOK: Output: default@table1
+PREHOOK: query: insert into table table1 values(1, 'new', 10, 'ten'),(2, 'new', 20, 'twenty'), (3, 'new', 30, 'thirty'),(4, 'new', 40, 'forty')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@table1
+POSTHOOK: query: insert into table table1 values(1, 'new', 10, 'ten'),(2, 'new', 20, 'twenty'), (3, 'new', 30, 'thirty'),(4, 'new', 40, 'forty')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table1.a EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: table1.b SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: table1.c EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: table1.d SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+_col0  _col1   _col2   _col3
+PREHOOK: query: select a,b,c,d from table1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table1
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b,c,d from table1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table1
+#### A masked pattern was here ####
+a      b       c       d
+1      new     10      ten
+1      original        NULL    NULL
+2      new     20      twenty
+2      original        NULL    NULL
+3      new     30      thirty
+3      original        NULL    NULL
+4      new     40      forty
+4      original        NULL    NULL
+PREHOOK: query: -- ADD COLUMNS
+alter table table1 add columns(e string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@table1
+PREHOOK: Output: default@table1
+POSTHOOK: query: -- ADD COLUMNS
+alter table table1 add columns(e string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@table1
+POSTHOOK: Output: default@table1
+PREHOOK: query: insert into table table1 values(5, 'new', 100, 'hundred', 'another1'),(6, 'new', 200, 'two hundred', 'another2')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__3
+PREHOOK: Output: default@table1
+POSTHOOK: query: insert into table table1 values(5, 'new', 100, 'hundred', 'another1'),(6, 'new', 200, 'two hundred', 'another2')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__3
+POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table1.a EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: table1.b SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: table1.c EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: table1.d SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+POSTHOOK: Lineage: table1.e SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col5, type:string, comment:), ]
+_col0  _col1   _col2   _col3   _col4
+PREHOOK: query: select a,b,c,d,e from table1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table1
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b,c,d,e from table1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table1
+#### A masked pattern was here ####
+a      b       c       d       e
+1      new     10      ten     NULL
+1      original        NULL    NULL    NULL
+2      new     20      twenty  NULL
+2      original        NULL    NULL    NULL
+3      new     30      thirty  NULL
+3      original        NULL    NULL    NULL
+4      new     40      forty   NULL
+4      original        NULL    NULL    NULL
+5      new     100     hundred another1
+6      new     200     two hundred     another2
+PREHOOK: query: --
+-- SECTION VARIATION: ALTER TABLE CHANGE COLUMN
+-- smallint = (2-byte signed integer, from -32,768 to 32,767)
+--
+CREATE TABLE table3(a smallint, b STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table3
+POSTHOOK: query: --
+-- SECTION VARIATION: ALTER TABLE CHANGE COLUMN
+-- smallint = (2-byte signed integer, from -32,768 to 32,767)
+--
+CREATE TABLE table3(a smallint, b STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table3
+PREHOOK: query: insert into table table3 values(1000, 'original'),(6737, 'original'), ('3', 'original'),('4', 'original')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__4
+PREHOOK: Output: default@table3
+POSTHOOK: query: insert into table table3 values(1000, 'original'),(6737, 'original'), ('3', 'original'),('4', 'original')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__4
+POSTHOOK: Output: default@table3
+POSTHOOK: Lineage: table3.a EXPRESSION [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: table3.b SIMPLE [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1
+PREHOOK: query: select a,b from table3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table3
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b from table3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table3
+#### A masked pattern was here ####
+a      b
+1000   original
+3      original
+4      original
+6737   original
+PREHOOK: query: -- CHANGE COLUMN ... RESTRICT
+alter table table3 change column a a int
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@table3
+PREHOOK: Output: default@table3
+POSTHOOK: query: -- CHANGE COLUMN ... RESTRICT
+alter table table3 change column a a int
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@table3
+POSTHOOK: Output: default@table3
+PREHOOK: query: insert into table table3 values(72909, 'new'),(200, 'new'), (32768, 'new'),(40000, 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__5
+PREHOOK: Output: default@table3
+POSTHOOK: query: insert into table table3 values(72909, 'new'),(200, 'new'), (32768, 'new'),(40000, 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__5
+POSTHOOK: Output: default@table3
+POSTHOOK: Lineage: table3.a EXPRESSION [(values__tmp__table__5)values__tmp__table__5.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: table3.b SIMPLE [(values__tmp__table__5)values__tmp__table__5.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1
+PREHOOK: query: select a,b from table3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table3
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b from table3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table3
+#### A masked pattern was here ####
+a      b
+1000   original
+200    new
+3      original
+32768  new
+4      original
+40000  new
+6737   original
+72909  new
+PREHOOK: query: -- ADD COLUMNS ... RESTRICT
+alter table table3 add columns(e string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@table3
+PREHOOK: Output: default@table3
+POSTHOOK: query: -- ADD COLUMNS ... RESTRICT
+alter table table3 add columns(e string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@table3
+POSTHOOK: Output: default@table3
+PREHOOK: query: insert into table table3 values(5000, 'new', 'another5'),(90000, 'new', 'another6')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__6
+PREHOOK: Output: default@table3
+POSTHOOK: query: insert into table table3 values(5000, 'new', 'another5'),(90000, 'new', 'another6')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__6
+POSTHOOK: Output: default@table3
+POSTHOOK: Lineage: table3.a EXPRESSION [(values__tmp__table__6)values__tmp__table__6.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: table3.b SIMPLE [(values__tmp__table__6)values__tmp__table__6.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: table3.e SIMPLE [(values__tmp__table__6)values__tmp__table__6.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+_col0  _col1   _col2
+PREHOOK: query: select a,b from table3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table3
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b from table3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table3
+#### A masked pattern was here ####
+a      b
+1000   original
+200    new
+3      original
+32768  new
+4      original
+40000  new
+5000   new
+6737   original
+72909  new
+90000  new
+PREHOOK: query: -- CHANGE COLUMN ... RESTRICT
+alter table table3 change column a a int
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@table3
+PREHOOK: Output: default@table3
+POSTHOOK: query: -- CHANGE COLUMN ... RESTRICT
+alter table table3 change column a a int
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@table3
+POSTHOOK: Output: default@table3
+PREHOOK: query: select a,b from table3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table3
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b from table3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table3
+#### A masked pattern was here ####
+a      b
+1000   original
+200    new
+3      original
+32768  new
+4      original
+40000  new
+5000   new
+6737   original
+72909  new
+90000  new
+PREHOOK: query: DROP TABLE table1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table1
+PREHOOK: Output: default@table1
+POSTHOOK: query: DROP TABLE table1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table1
+POSTHOOK: Output: default@table1
+PREHOOK: query: DROP TABLE table2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE table3
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table3
+PREHOOK: Output: default@table3
+POSTHOOK: query: DROP TABLE table3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table3
+POSTHOOK: Output: default@table3

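Taken together, the golden file above pins down Hive's schema-evolution contract for an unpartitioned TEXTFILE table: ALTER TABLE ... ADD COLUMNS and CHANGE COLUMN rewrite only metadata, so rows written before the ALTER read back with NULL in the new columns, and the smallint values are simply reread as int after the widening change. A minimal sketch of the same sequence over Hive JDBC follows; the connection URL, credentials, and driver setup are illustrative assumptions, not part of this commit, and it presumes the hive-jdbc driver on the classpath.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    // Sketch only: replays the ADD COLUMNS portion of the golden file above.
    // Assumes a reachable HiveServer2; URL and user are placeholders.
    public class SchemaEvolutionSketch {
      public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                 "jdbc:hive2://localhost:10000/default", "hive", "");
             Statement stmt = conn.createStatement()) {
          stmt.execute("CREATE TABLE table1(a INT, b STRING) STORED AS TEXTFILE");
          stmt.execute("insert into table table1 values(1, 'original'),(2, 'original')");
          // Metadata-only change: data already on disk is not rewritten.
          stmt.execute("alter table table1 add columns(c int, d string)");
          try (ResultSet rs = stmt.executeQuery("select a,b,c,d from table1")) {
            while (rs.next()) {
              // c and d come back as SQL NULL for the pre-ALTER rows.
              System.out.printf("%d\t%s\t%s\t%s%n",
                  rs.getInt(1), rs.getString(2), rs.getObject(3), rs.getObject(4));
            }
          }
          stmt.execute("DROP TABLE table1");
        }
      }
    }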
http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_part.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_part.q.out b/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_part.q.out
new file mode 100644
index 0000000..44f5822
--- /dev/null
+++ b/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_part.q.out
@@ -0,0 +1,642 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: TEXT, Non-Vectorized, MapWork, Partitioned
+--
+--
+-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... STATIC INSERT
+---
+CREATE TABLE partitioned1(a INT, b STRING) PARTITIONED BY(part INT) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@partitioned1
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: TEXT, Non-Vectorized, MapWork, Partitioned
+--
+--
+-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... STATIC INSERT
+---
+CREATE TABLE partitioned1(a INT, b STRING) PARTITIONED BY(part INT) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@partitioned1
+PREHOOK: query: insert into table partitioned1 partition(part=1) values(1, 'original'),(2, 'original'), (3, 'original'),(4, 'original')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@partitioned1@part=1
+POSTHOOK: query: insert into table partitioned1 partition(part=1) values(1, 'original'),(2, 'original'), (3, 'original'),(4, 'original')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@partitioned1@part=1
+POSTHOOK: Lineage: partitioned1 PARTITION(part=1).a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned1 PARTITION(part=1).b SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1
+PREHOOK: query: -- Table-Non-Cascade ADD COLUMNS ...
+alter table partitioned1 add columns(c int, d string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@partitioned1
+PREHOOK: Output: default@partitioned1
+POSTHOOK: query: -- Table-Non-Cascade ADD COLUMNS ...
+alter table partitioned1 add columns(c int, d string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Output: default@partitioned1
+PREHOOK: query: insert into table partitioned1 partition(part=2) values(1, 'new', 10, 'ten'),(2, 'new', 20, 'twenty'), (3, 'new', 30, 'thirty'),(4, 'new', 40, 'forty')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@partitioned1@part=2
+POSTHOOK: query: insert into table partitioned1 partition(part=2) values(1, 'new', 10, 'ten'),(2, 'new', 20, 'twenty'), (3, 'new', 30, 'thirty'),(4, 'new', 40, 'forty')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@partitioned1@part=2
+POSTHOOK: Lineage: partitioned1 PARTITION(part=2).a EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned1 PARTITION(part=2).b SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned1 PARTITION(part=2).c EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned1 PARTITION(part=2).d SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+_col0  _col1   _col2   _col3
+PREHOOK: query: insert into table partitioned1 partition(part=1) values(5, 'new', 100, 'hundred'),(6, 'new', 200, 'two hundred')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__3
+PREHOOK: Output: default@partitioned1@part=1
+POSTHOOK: query: insert into table partitioned1 partition(part=1) values(5, 'new', 100, 'hundred'),(6, 'new', 200, 'two hundred')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__3
+POSTHOOK: Output: default@partitioned1@part=1
+POSTHOOK: Lineage: partitioned1 PARTITION(part=1).a EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned1 PARTITION(part=1).b SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned1 PARTITION(part=1).c EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned1 PARTITION(part=1).d SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+_col0  _col1   _col2   _col3
+PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
+select part,a,b from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
+select part,a,b from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   a       b
+1      1       original
+1      2       original
+1      3       original
+1      4       original
+1      5       new
+1      6       new
+2      1       new
+2      2       new
+2      3       new
+2      4       new
+PREHOOK: query: select part,a,b,c from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,a,b,c from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   a       b       c
+1      1       original        NULL
+1      2       original        NULL
+1      3       original        NULL
+1      4       original        NULL
+1      5       new     NULL
+1      6       new     NULL
+2      1       new     10
+2      2       new     20
+2      3       new     30
+2      4       new     40
+PREHOOK: query: select part,a,b,c,d from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,a,b,c,d from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   a       b       c       d
+1      1       original        NULL    NULL
+1      2       original        NULL    NULL
+1      3       original        NULL    NULL
+1      4       original        NULL    NULL
+1      5       new     NULL    NULL
+1      6       new     NULL    NULL
+2      1       new     10      ten
+2      2       new     20      twenty
+2      3       new     30      thirty
+2      4       new     40      forty
+PREHOOK: query: select part,a,c,d from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,a,c,d from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   a       c       d
+1      1       NULL    NULL
+1      2       NULL    NULL
+1      3       NULL    NULL
+1      4       NULL    NULL
+1      5       NULL    NULL
+1      6       NULL    NULL
+2      1       10      ten
+2      2       20      twenty
+2      3       30      thirty
+2      4       40      forty
+PREHOOK: query: select part,a,d from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,a,d from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   a       d
+1      1       NULL
+1      2       NULL
+1      3       NULL
+1      4       NULL
+1      5       NULL
+1      6       NULL
+2      1       ten
+2      2       twenty
+2      3       thirty
+2      4       forty
+PREHOOK: query: select part,c from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,c from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   c
+1      NULL
+1      NULL
+1      NULL
+1      NULL
+1      NULL
+1      NULL
+2      10
+2      20
+2      30
+2      40
+PREHOOK: query: select part,d from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,d from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   d
+1      NULL
+1      NULL
+1      NULL
+1      NULL
+1      NULL
+1      NULL
+2      forty
+2      ten
+2      thirty
+2      twenty
+PREHOOK: query: --
+-- SECTION VARIATION: ALTER TABLE CHANGE COLUMN ... STATIC INSERT
+-- smallint = (2-byte signed integer, from -32,768 to 32,767)
+--
+CREATE TABLE partitioned2(a smallint, b STRING) PARTITIONED BY(part INT) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@partitioned2
+POSTHOOK: query: --
+-- SECTION VARIATION: ALTER TABLE CHANGE COLUMN ... STATIC INSERT
+-- smallint = (2-byte signed integer, from -32,768 to 32,767)
+--
+CREATE TABLE partitioned2(a smallint, b STRING) PARTITIONED BY(part INT) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@partitioned2
+PREHOOK: query: insert into table partitioned2 partition(part=1) values(1000, 'original'),(6737, 'original'), ('3', 'original'),('4', 'original')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__4
+PREHOOK: Output: default@partitioned2@part=1
+POSTHOOK: query: insert into table partitioned2 partition(part=1) values(1000, 'original'),(6737, 'original'), ('3', 'original'),('4', 'original')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__4
+POSTHOOK: Output: default@partitioned2@part=1
+POSTHOOK: Lineage: partitioned2 PARTITION(part=1).a EXPRESSION [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned2 PARTITION(part=1).b SIMPLE [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1
+PREHOOK: query: -- Table-Non-Cascade CHANGE COLUMNS ...
+alter table partitioned2 change column a a int
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@partitioned2
+PREHOOK: Output: default@partitioned2
+POSTHOOK: query: -- Table-Non-Cascade CHANGE COLUMNS ...
+alter table partitioned2 change column a a int
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@partitioned2
+POSTHOOK: Output: default@partitioned2
+PREHOOK: query: insert into table partitioned2 partition(part=2) values(72909, 'new'),(200, 'new'), (32768, 'new'),(40000, 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__5
+PREHOOK: Output: default@partitioned2@part=2
+POSTHOOK: query: insert into table partitioned2 partition(part=2) values(72909, 'new'),(200, 'new'), (32768, 'new'),(40000, 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__5
+POSTHOOK: Output: default@partitioned2@part=2
+POSTHOOK: Lineage: partitioned2 PARTITION(part=2).a EXPRESSION [(values__tmp__table__5)values__tmp__table__5.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned2 PARTITION(part=2).b SIMPLE [(values__tmp__table__5)values__tmp__table__5.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1
+PREHOOK: query: insert into table partitioned2 partition(part=1) values(5000, 'new'),(90000, 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__6
+PREHOOK: Output: default@partitioned2@part=1
+POSTHOOK: query: insert into table partitioned2 partition(part=1) values(5000, 'new'),(90000, 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__6
+POSTHOOK: Output: default@partitioned2@part=1
+POSTHOOK: Lineage: partitioned2 PARTITION(part=1).a EXPRESSION [(values__tmp__table__6)values__tmp__table__6.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned2 PARTITION(part=1).b SIMPLE [(values__tmp__table__6)values__tmp__table__6.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1
+PREHOOK: query: select part,a,b from partitioned2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned2
+PREHOOK: Input: default@partitioned2@part=1
+PREHOOK: Input: default@partitioned2@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,a,b from partitioned2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned2
+POSTHOOK: Input: default@partitioned2@part=1
+POSTHOOK: Input: default@partitioned2@part=2
+#### A masked pattern was here ####
+part   a       b
+1      1000    original
+1      3       original
+1      4       original
+1      5000    new
+1      6737    original
+1      NULL    new
+2      200     new
+2      32768   new
+2      40000   new
+2      72909   new
+PREHOOK: query: --
+--
+-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... DYNAMIC INSERT
+---
+CREATE TABLE partitioned3(a INT, b STRING) PARTITIONED BY(part INT) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@partitioned3
+POSTHOOK: query: --
+--
+-- SECTION VARIATION: ALTER TABLE ADD COLUMNS ... DYNAMIC INSERT
+---
+CREATE TABLE partitioned3(a INT, b STRING) PARTITIONED BY(part INT) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@partitioned3
+PREHOOK: query: insert into table partitioned3 partition(part=1) values(1, 'original'),(2, 'original'), (3, 'original'),(4, 'original')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__7
+PREHOOK: Output: default@partitioned3@part=1
+POSTHOOK: query: insert into table partitioned3 partition(part=1) values(1, 'original'),(2, 'original'), (3, 'original'),(4, 'original')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__7
+POSTHOOK: Output: default@partitioned3@part=1
+POSTHOOK: Lineage: partitioned3 PARTITION(part=1).a EXPRESSION [(values__tmp__table__7)values__tmp__table__7.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned3 PARTITION(part=1).b SIMPLE [(values__tmp__table__7)values__tmp__table__7.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1
+PREHOOK: query: -- Table-Non-Cascade ADD COLUMNS ...
+alter table partitioned3 add columns(c int, d string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@partitioned3
+PREHOOK: Output: default@partitioned3
+POSTHOOK: query: -- Table-Non-Cascade ADD COLUMNS ...
+alter table partitioned3 add columns(c int, d string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@partitioned3
+POSTHOOK: Output: default@partitioned3
+PREHOOK: query: insert into table partitioned3 partition(part) values(1, 'new', 10, 'ten', 2),(2, 'new', 20, 'twenty', 2), (3, 'new', 30, 'thirty', 2),(4, 'new', 40, 'forty', 2),
+    (5, 'new', 100, 'hundred', 1),(6, 'new', 200, 'two hundred', 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__8
+PREHOOK: Output: default@partitioned3
+POSTHOOK: query: insert into table partitioned3 partition(part) values(1, 'new', 10, 'ten', 2),(2, 'new', 20, 'twenty', 2), (3, 'new', 30, 'thirty', 2),(4, 'new', 40, 'forty', 2),
+    (5, 'new', 100, 'hundred', 1),(6, 'new', 200, 'two hundred', 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__8
+POSTHOOK: Output: default@partitioned3@part=1
+POSTHOOK: Output: default@partitioned3@part=2
+POSTHOOK: Lineage: partitioned3 PARTITION(part=1).a EXPRESSION [(values__tmp__table__8)values__tmp__table__8.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned3 PARTITION(part=1).b SIMPLE [(values__tmp__table__8)values__tmp__table__8.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned3 PARTITION(part=1).c EXPRESSION [(values__tmp__table__8)values__tmp__table__8.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned3 PARTITION(part=1).d SIMPLE [(values__tmp__table__8)values__tmp__table__8.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned3 PARTITION(part=2).a EXPRESSION [(values__tmp__table__8)values__tmp__table__8.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned3 PARTITION(part=2).b SIMPLE [(values__tmp__table__8)values__tmp__table__8.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned3 PARTITION(part=2).c EXPRESSION [(values__tmp__table__8)values__tmp__table__8.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned3 PARTITION(part=2).d SIMPLE [(values__tmp__table__8)values__tmp__table__8.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+_col0  _col1   _col2   _col3   _col4
+PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
+select part,a,b from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
+select part,a,b from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   a       b
+1      1       original
+1      2       original
+1      3       original
+1      4       original
+1      5       new
+1      6       new
+2      1       new
+2      2       new
+2      3       new
+2      4       new
+PREHOOK: query: select part,a,b,c from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,a,b,c from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   a       b       c
+1      1       original        NULL
+1      2       original        NULL
+1      3       original        NULL
+1      4       original        NULL
+1      5       new     NULL
+1      6       new     NULL
+2      1       new     10
+2      2       new     20
+2      3       new     30
+2      4       new     40
+PREHOOK: query: select part,a,b,c,d from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,a,b,c,d from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   a       b       c       d
+1      1       original        NULL    NULL
+1      2       original        NULL    NULL
+1      3       original        NULL    NULL
+1      4       original        NULL    NULL
+1      5       new     NULL    NULL
+1      6       new     NULL    NULL
+2      1       new     10      ten
+2      2       new     20      twenty
+2      3       new     30      thirty
+2      4       new     40      forty
+PREHOOK: query: select part,a,c,d from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,a,c,d from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   a       c       d
+1      1       NULL    NULL
+1      2       NULL    NULL
+1      3       NULL    NULL
+1      4       NULL    NULL
+1      5       NULL    NULL
+1      6       NULL    NULL
+2      1       10      ten
+2      2       20      twenty
+2      3       30      thirty
+2      4       40      forty
+PREHOOK: query: select part,a,d from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,a,d from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   a       d
+1      1       NULL
+1      2       NULL
+1      3       NULL
+1      4       NULL
+1      5       NULL
+1      6       NULL
+2      1       ten
+2      2       twenty
+2      3       thirty
+2      4       forty
+PREHOOK: query: select part,c from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,c from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   c
+1      NULL
+1      NULL
+1      NULL
+1      NULL
+1      NULL
+1      NULL
+2      10
+2      20
+2      30
+2      40
+PREHOOK: query: select part,d from partitioned1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned1
+PREHOOK: Input: default@partitioned1@part=1
+PREHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,d from partitioned1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Input: default@partitioned1@part=1
+POSTHOOK: Input: default@partitioned1@part=2
+#### A masked pattern was here ####
+part   d
+1      NULL
+1      NULL
+1      NULL
+1      NULL
+1      NULL
+1      NULL
+2      forty
+2      ten
+2      thirty
+2      twenty
+PREHOOK: query: --
+-- SECTION VARIATION: ALTER TABLE CHANGE COLUMN ... DYNAMIC INSERT
+-- smallint = (2-byte signed integer, from -32,768 to 32,767)
+--
+CREATE TABLE partitioned4(a smallint, b STRING) PARTITIONED BY(part INT) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@partitioned4
+POSTHOOK: query: --
+-- SECTION VARIATION: ALTER TABLE CHANGE COLUMN ... DYNAMIC INSERT
+-- smallint = (2-byte signed integer, from -32,768 to 32,767)
+--
+CREATE TABLE partitioned4(a smallint, b STRING) PARTITIONED BY(part INT) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@partitioned4
+PREHOOK: query: insert into table partitioned4 partition(part=1) values(1000, 'original'),(6737, 'original'), ('3', 'original'),('4', 'original')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__9
+PREHOOK: Output: default@partitioned4@part=1
+POSTHOOK: query: insert into table partitioned4 partition(part=1) values(1000, 'original'),(6737, 'original'), ('3', 'original'),('4', 'original')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__9
+POSTHOOK: Output: default@partitioned4@part=1
+POSTHOOK: Lineage: partitioned4 PARTITION(part=1).a EXPRESSION [(values__tmp__table__9)values__tmp__table__9.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned4 PARTITION(part=1).b SIMPLE [(values__tmp__table__9)values__tmp__table__9.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1
+PREHOOK: query: -- Table-Non-Cascade CHANGE COLUMNS ...
+alter table partitioned4 change column a a int
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@partitioned4
+PREHOOK: Output: default@partitioned4
+POSTHOOK: query: -- Table-Non-Cascade CHANGE COLUMNS ...
+alter table partitioned4 change column a a int
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@partitioned4
+POSTHOOK: Output: default@partitioned4
+PREHOOK: query: insert into table partitioned4 partition(part) values(72909, 'new', 2),(200, 'new', 2), (32768, 'new', 2),(40000, 'new', 2),
+    (5000, 'new', 1),(90000, 'new', 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__10
+PREHOOK: Output: default@partitioned4
+POSTHOOK: query: insert into table partitioned4 partition(part) values(72909, 'new', 2),(200, 'new', 2), (32768, 'new', 2),(40000, 'new', 2),
+    (5000, 'new', 1),(90000, 'new', 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__10
+POSTHOOK: Output: default@partitioned4@part=1
+POSTHOOK: Output: default@partitioned4@part=2
+POSTHOOK: Lineage: partitioned4 PARTITION(part=1).a EXPRESSION [(values__tmp__table__10)values__tmp__table__10.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned4 PARTITION(part=1).b SIMPLE [(values__tmp__table__10)values__tmp__table__10.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned4 PARTITION(part=2).a EXPRESSION [(values__tmp__table__10)values__tmp__table__10.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: partitioned4 PARTITION(part=2).b SIMPLE [(values__tmp__table__10)values__tmp__table__10.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1   _col2
+PREHOOK: query: select part,a,b from partitioned4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partitioned4
+PREHOOK: Input: default@partitioned4@part=1
+PREHOOK: Input: default@partitioned4@part=2
+#### A masked pattern was here ####
+POSTHOOK: query: select part,a,b from partitioned4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partitioned4
+POSTHOOK: Input: default@partitioned4@part=1
+POSTHOOK: Input: default@partitioned4@part=2
+#### A masked pattern was here ####
+part   a       b
+1      1000    original
+1      3       original
+1      4       original
+1      5000    new
+1      6737    original
+1      NULL    new
+2      200     new
+2      32768   new
+2      40000   new
+2      72909   new
+PREHOOK: query: DROP TABLE partitioned1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@partitioned1
+PREHOOK: Output: default@partitioned1
+POSTHOOK: query: DROP TABLE partitioned1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@partitioned1
+POSTHOOK: Output: default@partitioned1
+PREHOOK: query: DROP TABLE partitioned2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@partitioned2
+PREHOOK: Output: default@partitioned2
+POSTHOOK: query: DROP TABLE partitioned2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@partitioned2
+POSTHOOK: Output: default@partitioned2
+PREHOOK: query: DROP TABLE partitioned3
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@partitioned3
+PREHOOK: Output: default@partitioned3
+POSTHOOK: query: DROP TABLE partitioned3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@partitioned3
+POSTHOOK: Output: default@partitioned3
+PREHOOK: query: DROP TABLE partitioned4
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@partitioned4
+PREHOOK: Output: default@partitioned4
+POSTHOOK: query: DROP TABLE partitioned4
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@partitioned4
+POSTHOOK: Output: default@partitioned4

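The partitioned golden file above exercises two further points. First, a Table-Non-Cascade ALTER updates only the table-level schema, so partitions created before the ALTER (part=1) keep their original column list and serve NULL for columns they never stored, while partitions written afterwards (part=2) hold real values. Second, the insert ... partition(part) statements are fully dynamic, which Hive accepts only in nonstrict dynamic-partition mode. A sketch of that setup, under the same illustrative connection assumptions as before:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    // Sketch only: fully dynamic partitioned insert after a non-cascade ALTER.
    public class DynamicPartitionSketch {
      public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                 "jdbc:hive2://localhost:10000/default", "hive", "");
             Statement stmt = conn.createStatement()) {
          // Required when every partition column is dynamic.
          stmt.execute("SET hive.exec.dynamic.partition.mode=nonstrict");
          stmt.execute("CREATE TABLE partitioned3(a INT, b STRING) "
              + "PARTITIONED BY(part INT) STORED AS TEXTFILE");
          // Non-cascade: only the table descriptor gains c and d.
          stmt.execute("alter table partitioned3 add columns(c int, d string)");
          // The trailing value in each row feeds the dynamic 'part' column.
          stmt.execute("insert into table partitioned3 partition(part) "
              + "values(1, 'new', 10, 'ten', 2),(5, 'new', 100, 'hundred', 1)");
        }
      }
    }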
http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_table.q.out b/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_table.q.out
new file mode 100644
index 0000000..4003c20
--- /dev/null
+++ b/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_table.q.out
@@ -0,0 +1,297 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: TEXT, Non-Vectorized, MapWork, Table
+--
+--
+-- SECTION VARIATION: ALTER TABLE ADD COLUMNS
+---
+CREATE TABLE table1(a INT, b STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+--
+-- FILE VARIATION: TEXT, Non-Vectorized, MapWork, Table
+--
+--
+-- SECTION VARIATION: ALTER TABLE ADD COLUMNS
+---
+CREATE TABLE table1(a INT, b STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table1
+PREHOOK: query: insert into table table1 values(1, 'original'),(2, 'original'), (3, 'original'),(4, 'original')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@table1
+POSTHOOK: query: insert into table table1 values(1, 'original'),(2, 'original'), (3, 'original'),(4, 'original')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table1.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: table1.b SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1
+PREHOOK: query: select a,b from table1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table1
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b from table1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table1
+#### A masked pattern was here ####
+a      b
+1      original
+2      original
+3      original
+4      original
+PREHOOK: query: -- ADD COLUMNS
+alter table table1 add columns(c int, d string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@table1
+PREHOOK: Output: default@table1
+POSTHOOK: query: -- ADD COLUMNS
+alter table table1 add columns(c int, d string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@table1
+POSTHOOK: Output: default@table1
+PREHOOK: query: insert into table table1 values(1, 'new', 10, 'ten'),(2, 'new', 20, 'twenty'), (3, 'new', 30, 'thirty'),(4, 'new', 40, 'forty')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@table1
+POSTHOOK: query: insert into table table1 values(1, 'new', 10, 'ten'),(2, 'new', 20, 'twenty'), (3, 'new', 30, 'thirty'),(4, 'new', 40, 'forty')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table1.a EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: table1.b SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: table1.c EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: table1.d SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+_col0  _col1   _col2   _col3
+PREHOOK: query: select a,b,c,d from table1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table1
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b,c,d from table1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table1
+#### A masked pattern was here ####
+a      b       c       d
+1      new     10      ten
+1      original        NULL    NULL
+2      new     20      twenty
+2      original        NULL    NULL
+3      new     30      thirty
+3      original        NULL    NULL
+4      new     40      forty
+4      original        NULL    NULL
+PREHOOK: query: -- ADD COLUMNS
+alter table table1 add columns(e string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@table1
+PREHOOK: Output: default@table1
+POSTHOOK: query: -- ADD COLUMNS
+alter table table1 add columns(e string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@table1
+POSTHOOK: Output: default@table1
+PREHOOK: query: insert into table table1 values(5, 'new', 100, 'hundred', 'another1'),(6, 'new', 200, 'two hundred', 'another2')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__3
+PREHOOK: Output: default@table1
+POSTHOOK: query: insert into table table1 values(5, 'new', 100, 'hundred', 'another1'),(6, 'new', 200, 'two hundred', 'another2')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__3
+POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table1.a EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: table1.b SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: table1.c EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: table1.d SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+POSTHOOK: Lineage: table1.e SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col5, type:string, comment:), ]
+_col0  _col1   _col2   _col3   _col4
+PREHOOK: query: select a,b,c,d,e from table1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table1
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b,c,d,e from table1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table1
+#### A masked pattern was here ####
+a      b       c       d       e
+1      new     10      ten     NULL
+1      original        NULL    NULL    NULL
+2      new     20      twenty  NULL
+2      original        NULL    NULL    NULL
+3      new     30      thirty  NULL
+3      original        NULL    NULL    NULL
+4      new     40      forty   NULL
+4      original        NULL    NULL    NULL
+5      new     100     hundred another1
+6      new     200     two hundred     another2
+PREHOOK: query: --
+-- SECTION VARIATION: ALTER TABLE CHANGE COLUMN
+-- smallint = (2-byte signed integer, from -32,768 to 32,767)
+--
+CREATE TABLE table3(a smallint, b STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table3
+POSTHOOK: query: --
+-- SECTION VARIATION: ALTER TABLE CHANGE COLUMN
+-- smallint = (2-byte signed integer, from -32,768 to 32,767)
+--
+CREATE TABLE table3(a smallint, b STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table3
+PREHOOK: query: insert into table table3 values(1000, 'original'),(6737, 'original'), ('3', 'original'),('4', 'original')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__4
+PREHOOK: Output: default@table3
+POSTHOOK: query: insert into table table3 values(1000, 'original'),(6737, 'original'), ('3', 'original'),('4', 'original')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__4
+POSTHOOK: Output: default@table3
+POSTHOOK: Lineage: table3.a EXPRESSION [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: table3.b SIMPLE [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1
+PREHOOK: query: select a,b from table3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table3
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b from table3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table3
+#### A masked pattern was here ####
+a      b
+1000   original
+3      original
+4      original
+6737   original
+PREHOOK: query: -- CHANGE COLUMN ... RESTRICT
+alter table table3 change column a a int
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@table3
+PREHOOK: Output: default@table3
+POSTHOOK: query: -- CHANGE COLUMN ... RESTRICT
+alter table table3 change column a a int
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@table3
+POSTHOOK: Output: default@table3
+PREHOOK: query: insert into table table3 values(72909, 'new'),(200, 'new'), (32768, 'new'),(40000, 'new')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__5
+PREHOOK: Output: default@table3
+POSTHOOK: query: insert into table table3 values(72909, 'new'),(200, 'new'), (32768, 'new'),(40000, 'new')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__5
+POSTHOOK: Output: default@table3
+POSTHOOK: Lineage: table3.a EXPRESSION [(values__tmp__table__5)values__tmp__table__5.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: table3.b SIMPLE [(values__tmp__table__5)values__tmp__table__5.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+_col0  _col1
+PREHOOK: query: select a,b from table3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table3
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b from table3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table3
+#### A masked pattern was here ####
+a      b
+1000   original
+200    new
+3      original
+32768  new
+4      original
+40000  new
+6737   original
+72909  new
+PREHOOK: query: -- ADD COLUMNS ... RESTRICT
+alter table table3 add columns(e string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@table3
+PREHOOK: Output: default@table3
+POSTHOOK: query: -- ADD COLUMNS ... RESTRICT
+alter table table3 add columns(e string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@table3
+POSTHOOK: Output: default@table3
+PREHOOK: query: insert into table table3 values(5000, 'new', 'another5'),(90000, 'new', 'another6')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__6
+PREHOOK: Output: default@table3
+POSTHOOK: query: insert into table table3 values(5000, 'new', 'another5'),(90000, 'new', 'another6')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__6
+POSTHOOK: Output: default@table3
+POSTHOOK: Lineage: table3.a EXPRESSION [(values__tmp__table__6)values__tmp__table__6.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: table3.b SIMPLE [(values__tmp__table__6)values__tmp__table__6.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: table3.e SIMPLE [(values__tmp__table__6)values__tmp__table__6.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+_col0  _col1   _col2
+PREHOOK: query: select a,b from table3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table3
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b from table3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table3
+#### A masked pattern was here ####
+a      b
+1000   original
+200    new
+3      original
+32768  new
+4      original
+40000  new
+5000   new
+6737   original
+72909  new
+90000  new
+PREHOOK: query: -- CHANGE COLUMN ... RESTRICT
+alter table table3 change column a a int
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@table3
+PREHOOK: Output: default@table3
+POSTHOOK: query: -- CHANGE COLUMN ... RESTRICT
+alter table table3 change column a a int
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@table3
+POSTHOOK: Output: default@table3
+PREHOOK: query: select a,b from table3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table3
+#### A masked pattern was here ####
+POSTHOOK: query: select a,b from table3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table3
+#### A masked pattern was here ####
+a      b
+1000   original
+200    new
+3      original
+32768  new
+4      original
+40000  new
+5000   new
+6737   original
+72909  new
+90000  new
+PREHOOK: query: DROP TABLE table1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table1
+PREHOOK: Output: default@table1
+POSTHOOK: query: DROP TABLE table1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table1
+POSTHOOK: Output: default@table1
+PREHOOK: query: DROP TABLE table2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE table3
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table3
+PREHOOK: Output: default@table3
+POSTHOOK: query: DROP TABLE table3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table3
+POSTHOOK: Output: default@table3

http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/ql/src/test/results/clientpositive/tez/vector_partition_diff_num_cols.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vector_partition_diff_num_cols.q.out b/ql/src/test/results/clientpositive/tez/vector_partition_diff_num_cols.q.out
index 65b2ff1..04f8a1f 100644
--- a/ql/src/test/results/clientpositive/tez/vector_partition_diff_num_cols.q.out
+++ b/ql/src/test/results/clientpositive/tez/vector_partition_diff_num_cols.q.out
@@ -237,6 +237,7 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: bigint)
+            Execution mode: vectorized
         Reducer 2 
             Reduce Operator Tree:
               Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/ql/src/test/results/clientpositive/vector_partition_diff_num_cols.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_partition_diff_num_cols.q.out b/ql/src/test/results/clientpositive/vector_partition_diff_num_cols.q.out
index 0437ff6..99f42e9 100644
--- a/ql/src/test/results/clientpositive/vector_partition_diff_num_cols.q.out
+++ b/ql/src/test/results/clientpositive/vector_partition_diff_num_cols.q.out
@@ -225,6 +225,7 @@ STAGE PLANS:
                   sort order: 
                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+      Execution mode: vectorized
       Reduce Operator Tree:
         Group By Operator
           aggregations: sum(VALUE._col0)

http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
index 2b6d9c0..7c77dd8 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecim
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.Text;
 
@@ -62,7 +63,7 @@ import org.apache.hadoop.io.Text;
 public class BinarySortableDeserializeRead implements DeserializeRead {
  public static final Log LOG = LogFactory.getLog(BinarySortableDeserializeRead.class.getName());
 
-  private PrimitiveTypeInfo[] primitiveTypeInfos;
+  private TypeInfo[] typeInfos;
 
  // The sort order (ascending/descending) for each field. Set to true when descending (invert).
   private boolean[] columnSortOrderIsDesc;
@@ -94,14 +95,14 @@ public class BinarySortableDeserializeRead implements DeserializeRead {
     this(primitiveTypeInfos, null);
   }
 
-  public BinarySortableDeserializeRead(PrimitiveTypeInfo[] primitiveTypeInfos,
+  public BinarySortableDeserializeRead(TypeInfo[] typeInfos,
           boolean[] columnSortOrderIsDesc) {
-    this.primitiveTypeInfos = primitiveTypeInfos;
-    fieldCount = primitiveTypeInfos.length;
+    this.typeInfos = typeInfos;
+    fieldCount = typeInfos.length;
     if (columnSortOrderIsDesc != null) {
       this.columnSortOrderIsDesc = columnSortOrderIsDesc;
     } else {
-      this.columnSortOrderIsDesc = new boolean[primitiveTypeInfos.length];
+      this.columnSortOrderIsDesc = new boolean[typeInfos.length];
       Arrays.fill(this.columnSortOrderIsDesc, false);
     }
     inputByteBuffer = new InputByteBuffer();
@@ -117,8 +118,8 @@ public class BinarySortableDeserializeRead implements DeserializeRead {
   /*
    * The primitive type information for all fields.
    */
-  public PrimitiveTypeInfo[] primitiveTypeInfos() {
-    return primitiveTypeInfos;
+  public TypeInfo[] typeInfos() {
+    return typeInfos;
   }
 
   /*
@@ -176,7 +177,7 @@ public class BinarySortableDeserializeRead implements DeserializeRead {
 
     // We have a field and are positioned to it.
 
-    if (primitiveTypeInfos[fieldIndex].getPrimitiveCategory() != PrimitiveCategory.DECIMAL) {
+    if (((PrimitiveTypeInfo) typeInfos[fieldIndex]).getPrimitiveCategory() != PrimitiveCategory.DECIMAL) {
       return false;
     }
 
@@ -375,7 +376,7 @@ public class BinarySortableDeserializeRead implements DeserializeRead {
             (BinarySortableReadHiveCharResults) readHiveCharResults;
 
     if (!binarySortableReadHiveCharResults.isInit()) {
-      binarySortableReadHiveCharResults.init((CharTypeInfo) primitiveTypeInfos[fieldIndex]);
+      binarySortableReadHiveCharResults.init((CharTypeInfo) typeInfos[fieldIndex]);
     }
 
    HiveCharWritable hiveCharWritable = binarySortableReadHiveCharResults.getHiveCharWritable();
@@ -416,7 +417,7 @@ public class BinarySortableDeserializeRead implements DeserializeRead {
    BinarySortableReadHiveVarcharResults binarySortableReadHiveVarcharResults = (BinarySortableReadHiveVarcharResults) readHiveVarcharResults;
 
     if (!binarySortableReadHiveVarcharResults.isInit()) {
-      binarySortableReadHiveVarcharResults.init((VarcharTypeInfo) primitiveTypeInfos[fieldIndex]);
+      binarySortableReadHiveVarcharResults.init((VarcharTypeInfo) typeInfos[fieldIndex]);
     }
 
    HiveVarcharWritable hiveVarcharWritable = binarySortableReadHiveVarcharResults.getHiveVarcharWritable();
@@ -733,7 +734,7 @@ public class BinarySortableDeserializeRead implements DeserializeRead {
     }
     tempHiveDecimalWritable.set(bd);
 
-    saveDecimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfos[fieldIndex];
+    saveDecimalTypeInfo = (DecimalTypeInfo) typeInfos[fieldIndex];
 
     int precision = saveDecimalTypeInfo.getPrecision();
     int scale = saveDecimalTypeInfo.getScale();

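The hunks above replace BinarySortableDeserializeRead's PrimitiveTypeInfo[] field
with the more general TypeInfo[], casting back to PrimitiveTypeInfo only at the
call sites that need a primitive category. A minimal sketch of the pattern, not
part of the patch (the class and helper names here are hypothetical):

    import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
    import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

    // Hypothetical reader skeleton: hold the general TypeInfo[] and downcast
    // per field only where a primitive category is actually required.
    final class TypeInfoWideningSketch {
      private final TypeInfo[] typeInfos;

      TypeInfoWideningSketch(TypeInfo[] typeInfos) {
        this.typeInfos = typeInfos;
      }

      // Mirrors the cast now used for the DECIMAL check in readCheckNull above.
      boolean isDecimal(int fieldIndex) {
        return ((PrimitiveTypeInfo) typeInfos[fieldIndex]).getPrimitiveCategory()
            == PrimitiveCategory.DECIMAL;
      }
    }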
http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
index 285ae10..3779f1a 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
@@ -32,13 +32,8 @@ import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
-import org.apache.hadoop.hive.serde2.binarysortable.InputByteBuffer;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
-import org.apache.hadoop.hive.serde2.lazy.LazyHiveIntervalDayTime;
-import org.apache.hadoop.hive.serde2.lazy.LazyHiveIntervalYearMonth;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
 import org.apache.hive.common.util.DateUtils;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
index b187aff..c2b0cfc 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 
 /*
@@ -55,9 +55,9 @@ import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 public interface DeserializeRead {
 
   /*
-   * The primitive type information for all fields.
+   * The type information for all fields.
    */
-  PrimitiveTypeInfo[] primitiveTypeInfos();
+  TypeInfo[] typeInfos();
 
   /*
    * Set the range of bytes to be deserialized.

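Since the interface now exposes TypeInfo[] rather than PrimitiveTypeInfo[],
callers can check a field's category before dispatching a primitive read. A
hedged caller-side sketch (the class and method are illustrative only, not part
of the patch):

    import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

    // Illustrative guard enabled by the widened interface.
    final class FieldKindCheckSketch {
      static boolean allPrimitive(DeserializeRead reader) {
        for (TypeInfo typeInfo : reader.typeInfos()) {
          if (typeInfo.getCategory() != Category.PRIMITIVE) {
            return false;
          }
        }
        return true;
      }
    }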
http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
index 8c5b0b3..d4220ac 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
@@ -30,32 +30,24 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
-import org.apache.hadoop.hive.serde2.fast.DeserializeRead.ReadIntervalDayTimeResults;
-import org.apache.hadoop.hive.serde2.fast.DeserializeRead.ReadIntervalYearMonthResults;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
-import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
 import org.apache.hadoop.hive.serde2.lazy.LazyBinary;
 import org.apache.hadoop.hive.serde2.lazy.LazyByte;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
-import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
 import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.LazyShort;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.Text;
 import org.apache.hive.common.util.TimestampParser;
@@ -69,21 +61,19 @@ import org.apache.hive.common.util.TimestampParser;
  *
 * Reading some fields require a results object to receive value information.  A separate
 * results object is created by the caller at initialization per different field even for the same
- * type. 
+ * type.
  *
 * Some type values are by reference to either bytes in the deserialization buffer or to
 * other type specific buffers.  So, those references are only valid until the next time set is
  * called.
  */
-public class LazySimpleDeserializeRead implements DeserializeRead {
+public final class LazySimpleDeserializeRead implements DeserializeRead {
  public static final Log LOG = LogFactory.getLog(LazySimpleDeserializeRead.class.getName());
 
-  private PrimitiveTypeInfo[] primitiveTypeInfos;
+  private TypeInfo[] typeInfos;
 
-  private LazySerDeParameters lazyParams;
 
   private byte separator;
-  private boolean lastColumnTakesRest;
   private boolean isEscaped;
   private byte escapeChar;
   private byte[] nullSequenceBytes;
@@ -122,21 +112,19 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
   private boolean readBeyondBufferRangeWarned;
   private boolean bufferRangeHasExtraDataWarned;
 
-  public LazySimpleDeserializeRead(PrimitiveTypeInfo[] primitiveTypeInfos,
+  public LazySimpleDeserializeRead(TypeInfo[] typeInfos,
       byte separator, LazySerDeParameters lazyParams) {
 
-    this.primitiveTypeInfos = primitiveTypeInfos;
+    this.typeInfos = typeInfos;
 
     this.separator = separator;
-    this.lazyParams = lazyParams;
 
-    lastColumnTakesRest = lazyParams.isLastColumnTakesRest();
     isEscaped = lazyParams.isEscaped();
     escapeChar = lazyParams.getEscapeChar();
     nullSequenceBytes = lazyParams.getNullSequence().getBytes();
     isExtendedBooleanLiteral = lazyParams.isExtendedBooleanLiteral();
 
-    fieldCount = primitiveTypeInfos.length;
+    fieldCount = typeInfos.length;
     tempText = new Text();
     readBeyondConfiguredFieldsWarned = false;
     readBeyondBufferRangeWarned = false;
@@ -148,10 +136,11 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
   }
 
   /*
-   * The primitive type information for all fields.
+   * The type information for all fields.
    */
-  public PrimitiveTypeInfo[] primitiveTypeInfos() {
-    return primitiveTypeInfos;
+  @Override
+  public TypeInfo[] typeInfos() {
+    return typeInfos;
   }
 
   /*
@@ -189,7 +178,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
       if (!readBeyondBufferRangeWarned) {
         // Warn only once.
         int length = end - start;
-        LOG.info("Reading beyond buffer range! Buffer range " +  start 
+        LOG.info("Reading beyond buffer range! Buffer range " +  start
             + " for length " + length + " but reading more (NULLs returned)."
             + "  Ignoring similar problems.");
         readBeyondBufferRangeWarned = true;
@@ -243,7 +232,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
       }
     }
 
-    switch (primitiveTypeInfos[fieldIndex].getPrimitiveCategory()) {
+    switch (((PrimitiveTypeInfo) typeInfos[fieldIndex]).getPrimitiveCategory()) {
     case BOOLEAN:
       {
         int i = fieldStart;
@@ -427,7 +416,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
         try {
           s = new String(bytes, fieldStart, fieldLength, "US-ASCII");
         } catch (UnsupportedEncodingException e) {
-          LOG.error(e);
+          LOG.error("Unsupported encoding found ", e);
           s = "";
         }
 
@@ -466,7 +455,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
 //    }
       break;
     case INTERVAL_DAY_TIME:
-      {    
+      {
         String s = null;
         try {
           s = Text.decode(bytes, fieldStart, fieldLength);
@@ -491,7 +480,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
         }
 
         saveDecimal = HiveDecimal.create(byteData);
-        saveDecimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfos[fieldIndex];
+        saveDecimalTypeInfo = (DecimalTypeInfo) typeInfos[fieldIndex];
         int precision = saveDecimalTypeInfo.getPrecision();
         int scale = saveDecimalTypeInfo.getScale();
        saveDecimal = HiveDecimalUtils.enforcePrecisionScale(saveDecimal, precision, scale);
@@ -507,7 +496,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
       break;
 
     default:
-      throw new Error("Unexpected primitive category " + primitiveTypeInfos[fieldIndex].getPrimitiveCategory());
+      throw new Error("Unexpected primitive category " + ((PrimitiveTypeInfo) typeInfos[fieldIndex]).getPrimitiveCategory());
     }
 
     return false;
@@ -529,13 +518,14 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
   /*
   * Call this method after all fields have been read to check for extra fields.
    */
+  @Override
   public void extraFieldsCheck() {
     if (offset < end) {
       // We did not consume all of the byte range.
       if (!bufferRangeHasExtraDataWarned) {
         // Warn only once.
         int length = end - start;
-        LOG.info("Not all fields were read in the buffer range! Buffer range " +  start 
+        LOG.info("Not all fields were read in the buffer range! Buffer range " +  start
             + " for length " + length + " but reading more (NULLs returned)."
             + "  Ignoring similar problems.");
         bufferRangeHasExtraDataWarned = true;
@@ -630,7 +620,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
   }
 
   // Reading a STRING field require a results object to receive value information.  A separate
-  // results object is created by the caller at initialization per different bytes field. 
+  // results object is created by the caller at initialization per different bytes field.
   @Override
   public ReadStringResults createReadStringResults() {
     return new LazySimpleReadStringResults();
@@ -663,17 +653,18 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
   }
 
   // Reading a CHAR field require a results object to receive value information.  A separate
-  // results object is created by the caller at initialization per different CHAR field. 
+  // results object is created by the caller at initialization per different CHAR field.
   @Override
   public ReadHiveCharResults createReadHiveCharResults() {
     return new LazySimpleReadHiveCharResults();
   }
 
+  @Override
   public void readHiveChar(ReadHiveCharResults readHiveCharResults) throws IOException {
    LazySimpleReadHiveCharResults LazySimpleReadHiveCharResults = (LazySimpleReadHiveCharResults) readHiveCharResults;
 
     if (!LazySimpleReadHiveCharResults.isInit()) {
-      LazySimpleReadHiveCharResults.init((CharTypeInfo) primitiveTypeInfos[fieldIndex]);
+      LazySimpleReadHiveCharResults.init((CharTypeInfo) typeInfos[fieldIndex]);
     }
 
     if (LazySimpleReadHiveCharResults.readStringResults == null) {
@@ -714,17 +705,18 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
   }
 
   // Reading a VARCHAR field require a results object to receive value information.  A separate
-  // results object is created by the caller at initialization per different VARCHAR field. 
+  // results object is created by the caller at initialization per different VARCHAR field.
   @Override
   public ReadHiveVarcharResults createReadHiveVarcharResults() {
     return new LazySimpleReadHiveVarcharResults();
   }
 
+  @Override
   public void readHiveVarchar(ReadHiveVarcharResults readHiveVarcharResults) throws IOException {
    LazySimpleReadHiveVarcharResults lazySimpleReadHiveVarvarcharResults = (LazySimpleReadHiveVarcharResults) readHiveVarcharResults;
 
     if (!lazySimpleReadHiveVarvarcharResults.isInit()) {
-      lazySimpleReadHiveVarvarcharResults.init((VarcharTypeInfo) primitiveTypeInfos[fieldIndex]);
+      lazySimpleReadHiveVarvarcharResults.init((VarcharTypeInfo) typeInfos[fieldIndex]);
     }
 
     if (lazySimpleReadHiveVarvarcharResults.readStringResults == null) {
@@ -757,7 +749,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
   }
 
   // Reading a BINARY field require a results object to receive value information.  A separate
-  // results object is created by the caller at initialization per different bytes field. 
+  // results object is created by the caller at initialization per different bytes field.
   @Override
   public ReadBinaryResults createReadBinaryResults() {
     return new LazySimpleReadBinaryResults();
@@ -787,7 +779,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
   }
 
   // Reading a DATE field require a results object to receive value information.  A separate
-  // results object is created by the caller at initialization per different DATE field. 
+  // results object is created by the caller at initialization per different DATE field.
   @Override
   public ReadDateResults createReadDateResults() {
     return new LazySimpleReadDateResults();
@@ -821,7 +813,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
 
   // Reading a INTERVAL_YEAR_MONTH field require a results object to receive value information.
   // A separate results object is created by the caller at initialization per different
-  // INTERVAL_YEAR_MONTH field. 
+  // INTERVAL_YEAR_MONTH field.
   @Override
   public ReadIntervalYearMonthResults createReadIntervalYearMonthResults() {
     return new LazySimpleReadIntervalYearMonthResults();
@@ -833,7 +825,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
    LazySimpleReadIntervalYearMonthResults lazySimpleReadIntervalYearMonthResults =
            (LazySimpleReadIntervalYearMonthResults) readIntervalYearMonthResults;
 
-    HiveIntervalYearMonthWritable hiveIntervalYearMonthWritable = 
+    HiveIntervalYearMonthWritable hiveIntervalYearMonthWritable =
            lazySimpleReadIntervalYearMonthResults.getHiveIntervalYearMonthWritable();
     hiveIntervalYearMonthWritable.set(saveIntervalYearMonth);
     saveIntervalYearMonth = null;
@@ -857,7 +849,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
 
   // Reading a INTERVAL_DAY_TIME field require a results object to receive value information.
   // A separate results object is created by the caller at initialization per different
-  // INTERVAL_DAY_TIME field. 
+  // INTERVAL_DAY_TIME field.
   @Override
   public ReadIntervalDayTimeResults createReadIntervalDayTimeResults() {
     return new LazySimpleReadIntervalDayTimeResults();
@@ -869,7 +861,7 @@ public class LazySimpleDeserializeRead implements 
DeserializeRead {
     LazySimpleReadIntervalDayTimeResults lazySimpleReadIntervalDayTimeResults =
         (LazySimpleReadIntervalDayTimeResults) readIntervalDayTimeResults;
 
-    HiveIntervalDayTimeWritable hiveIntervalDayTimeWritable = 
+    HiveIntervalDayTimeWritable hiveIntervalDayTimeWritable =
            lazySimpleReadIntervalDayTimeResults.getHiveIntervalDayTimeWritable();
     hiveIntervalDayTimeWritable.set(saveIntervalDayTime);
     saveIntervalDayTime = null;
@@ -892,7 +884,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
   }
 
   // Reading a TIMESTAMP field require a results object to receive value information.  A separate
-  // results object is created by the caller at initialization per different TIMESTAMP field. 
+  // results object is created by the caller at initialization per different TIMESTAMP field.
   @Override
   public ReadTimestampResults createReadTimestampResults() {
     return new LazySimpleReadTimestampResults();
@@ -900,7 +892,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
 
   @Override
   public void readTimestamp(ReadTimestampResults readTimestampResults) {
-    LazySimpleReadTimestampResults lazySimpleReadTimestampResults = 
+    LazySimpleReadTimestampResults lazySimpleReadTimestampResults =
             (LazySimpleReadTimestampResults) readTimestampResults;
 
    TimestampWritable timestampWritable = lazySimpleReadTimestampResults.getTimestampWritable();
@@ -928,7 +920,7 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
   }
 
   // Reading a DECIMAL field require a results object to receive value information.  A separate
-  // results object is created by the caller at initialization per different DECIMAL field. 
+  // results object is created by the caller at initialization per different DECIMAL field.
   @Override
   public ReadDecimalResults createReadDecimalResults() {
     return new LazySimpleReadDecimalResults();
@@ -952,101 +944,6 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
   private static int maxLongDigitsCount = maxLongBytes.length;
  private static byte[] minLongNoSignBytes = ((Long) Long.MIN_VALUE).toString().substring(1).getBytes();
 
-  private boolean parseLongFast() {
-
-    // Parse without using exceptions for better performance.
-    int i = fieldStart;
-    int end = fieldStart + fieldLength;
-    boolean negative = false;
-    if (i >= end) {
-      return false;    // Empty field.
-    }
-    if (bytes[i] == '+') {
-      i++;
-      if (i >= end) {
-        return false;
-      }
-    } else if (bytes[i] == '-') {
-      negative = true;
-      i++;
-      if (i >= end) {
-        return false;
-      }
-    }
-    // Skip leading zeros.
-    boolean atLeastOneZero = false;
-    while (true) {
-      if (bytes[i] != '0') {
-        break;
-      }
-      i++;
-      if (i >= end) {
-        saveLong = 0;
-        return true;
-      }
-      atLeastOneZero = true;
-    }
-    // We tolerate and ignore decimal places.
-    if (bytes[i] == '.') {
-      if (!atLeastOneZero) {
-        return false;
-      }
-      saveLong = 0;
-      // Fall through below and verify trailing decimal digits.
-    } else {
-      if (!Character.isDigit(bytes[i])) {
-        return false;
-      }
-      int nonLeadingZeroStart = i;
-      int digitCount = 1;
-      saveLong = Character.digit(bytes[i], 10);
-      i++;
-      while (i < end) {
-        if (!Character.isDigit(bytes[i])) {
-          break;
-        }
-        digitCount++;
-        if (digitCount > maxLongDigitsCount) {
-          return false;
-        } else if (digitCount == maxLongDigitsCount) {
-          // Use the old trick of comparing against number string to check for overflow.
-          if (!negative) {
-            if (byteArrayCompareRanges(bytes, nonLeadingZeroStart, maxLongBytes, 0, digitCount) >= 1) {
-              return false;
-            }
-          } else {
-            if (byteArrayCompareRanges(bytes, nonLeadingZeroStart, minLongNoSignBytes, 0, digitCount) >= 1) {
-              return false;
-            }
-          }
-        }
-        saveLong = (saveLong * 10) + Character.digit(bytes[i], 10);
-      }
-      if (negative) {
-        // Safe because of our number string comparision against min (negative) long.
-        saveLong = -saveLong;
-      }
-      if (i >= end) {
-        return true;
-      }
-      if (bytes[i] != '.') {
-        return false;
-      }
-    }
-    // Fall through to here if we detect the start of trailing decimal digits...
-    // We verify trailing digits only.
-    while (true) {
-      i++;
-      if (i >= end) {
-        break;
-      }
-      if (!Character.isDigit(bytes[i])) {
-        return false;
-      }
-    }
-    return true;
-  }
-
  public static int byteArrayCompareRanges(byte[] arg1, int start1, byte[] arg2, int start2, int len) {
     for (int i = 0; i < len; i++) {
       // Note the "& 0xff" is just a way to convert unsigned bytes to signed 
integer.
@@ -1059,4 +956,4 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
     return 0;
   }
 
-}
\ No newline at end of file
+}

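The byteArrayCompareRanges helper kept above relies on the "& 0xff" idiom: Java
bytes are signed, and masking into an int yields the unsigned value, which is
the ordering binary-sortable data needs. A standalone sketch, not Hive code
(names are illustrative):

    final class UnsignedByteCompareSketch {
      // Masking with 0xff turns a signed byte into its unsigned int value.
      static int compareUnsigned(byte a, byte b) {
        return (a & 0xff) - (b & 0xff);
      }

      public static void main(String[] args) {
        // (byte) 0x80 is -128 signed but 128 unsigned, so it sorts after 0x7f.
        System.out.println(compareUnsigned((byte) 0x80, (byte) 0x7f) > 0);  // true
      }
    }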
http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
index 77838a1..46f37eb 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.serde2.lazy.fast;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.nio.charset.CharacterCodingException;
 import java.sql.Date;
 import java.sql.Timestamp;
 
@@ -34,7 +33,6 @@ import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -47,13 +45,6 @@ import org.apache.hadoop.hive.serde2.lazy.LazyLong;
 import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.LazyTimestamp;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters;
-import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
 import org.apache.hadoop.io.Text;
 import org.apache.hive.common.util.DateUtils;
@@ -516,4 +507,4 @@ public class LazySimpleSerializeWrite implements SerializeWrite {
 
     index++;
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
index 3d14fbe..2751adc 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
@@ -311,6 +311,35 @@ public class LazyBinarySerDe extends AbstractSerDe {
     public boolean value;
   }
 
+  private static void writeDateToByteStream(RandomAccessOutput byteStream,
+                                            DateWritable date) {
+    LazyBinaryUtils.writeVInt(byteStream, date.getDays());
+  }
+
+  public static void setFromBytes(byte[] bytes, int offset, int length,
+                                  HiveDecimalWritable dec) {
+    LazyBinaryUtils.VInt vInt = new LazyBinaryUtils.VInt();
+    LazyBinaryUtils.readVInt(bytes, offset, vInt);
+    int scale = vInt.value;
+    offset += vInt.length;
+    LazyBinaryUtils.readVInt(bytes, offset, vInt);
+    offset += vInt.length;
+    byte[] internalStorage = dec.getInternalStorage();
+    if (internalStorage.length != vInt.value) {
+      internalStorage = new byte[vInt.value];
+    }
+    System.arraycopy(bytes, offset, internalStorage, 0, vInt.value);
+    dec.set(internalStorage, scale);
+  }
+
+  public static void writeToByteStream(RandomAccessOutput byteStream,
+                                       HiveDecimalWritable dec) {
+    LazyBinaryUtils.writeVInt(byteStream, dec.getScale());
+    byte[] internalStorage = dec.getInternalStorage();
+    LazyBinaryUtils.writeVInt(byteStream, internalStorage.length);
+    byteStream.write(internalStorage, 0, internalStorage.length);
+  }
+
   /**
    * A recursive function that serialize an object to a byte buffer based on its
    * object inspector.

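The public helpers added above encode a decimal as a VInt scale, a VInt byte
count, and then the decimal's unscaled internal bytes. A hedged round-trip
sketch against this patched LazyBinarySerDe (the driver class is illustrative,
not part of the patch):

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.ByteStream.Output;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
    import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;

    public class DecimalRoundTripSketch {
      public static void main(String[] args) {
        HiveDecimalWritable dec = new HiveDecimalWritable();
        dec.set(HiveDecimal.create("12.34"));
        Output out = new Output();
        LazyBinarySerDe.writeToByteStream(out, dec);   // scale, length, bytes

        HiveDecimalWritable back = new HiveDecimalWritable();
        LazyBinarySerDe.setFromBytes(out.getData(), 0, out.getLength(), back);
        System.out.println(back.getHiveDecimal());     // 12.34
      }
    }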