http://git-wip-us.apache.org/repos/asf/hive/blob/dd512593/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
index 95ebf46..1827f67 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
@@ -283,8 +283,8 @@ STAGE PLANS:
             Map Vectorization:
                 enabled: true
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                inputFormatFeatureSupport: []
-                featureSupportInUse: []
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
                 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                 allNative: true
                 usesVectorUDFAdaptor: false
@@ -491,8 +491,8 @@ STAGE PLANS:
             Map Vectorization:
                 enabled: true
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                inputFormatFeatureSupport: []
-                featureSupportInUse: []
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
                 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                 allNative: true
                 usesVectorUDFAdaptor: true
@@ -683,8 +683,8 @@ STAGE PLANS:
             Map Vectorization:
                 enabled: true
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                inputFormatFeatureSupport: []
-                featureSupportInUse: []
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
                 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                 allNative: true
                 usesVectorUDFAdaptor: true
@@ -875,8 +875,8 @@ STAGE PLANS:
             Map Vectorization:
                 enabled: true
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                inputFormatFeatureSupport: []
-                featureSupportInUse: []
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
                 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                 allNative: true
                 usesVectorUDFAdaptor: true
@@ -1016,8 +1016,8 @@ STAGE PLANS:
             Map Vectorization:
                 enabled: true
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                inputFormatFeatureSupport: []
-                featureSupportInUse: []
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
                 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                 allNative: false
                 usesVectorUDFAdaptor: false
@@ -1141,8 +1141,8 @@ STAGE PLANS:
             Map Vectorization:
                 enabled: true
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                inputFormatFeatureSupport: []
-                featureSupportInUse: []
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
                 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                 allNative: false
                 usesVectorUDFAdaptor: false
@@ -1284,8 +1284,8 @@ STAGE PLANS:
             Map Vectorization:
                 enabled: true
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                inputFormatFeatureSupport: []
-                featureSupportInUse: []
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
                 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                 allNative: false
                 usesVectorUDFAdaptor: false

http://git-wip-us.apache.org/repos/asf/hive/blob/dd512593/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
index 3c9cf03..d10faeb 100644
--- a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
+++ b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
@@ -370,10 +370,10 @@ POSTHOOK: Lineage: over10k_orc_bucketed_n0.si SIMPLE [(over10k_n9)over10k_n9.Fie
 POSTHOOK: Lineage: over10k_orc_bucketed_n0.t SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:t, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: over10k_orc_bucketed_n0.ts SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:ts, type:timestamp, comment:null), ]
 Found 4 items
--rw-rw-rw-   3 ### USER ### ### GROUP ###       8997 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7773 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7358 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7261 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       8914 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7709 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7284 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7190 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: insert into over10k_orc_bucketed_n0 select * from over10k_n9
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over10k_n9
@@ -394,14 +394,14 @@ POSTHOOK: Lineage: over10k_orc_bucketed_n0.si SIMPLE [(over10k_n9)over10k_n9.Fie
 POSTHOOK: Lineage: over10k_orc_bucketed_n0.t SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:t, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: over10k_orc_bucketed_n0.ts SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:ts, type:timestamp, comment:null), ]
 Found 8 items
--rw-rw-rw-   3 ### USER ### ### GROUP ###       8997 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       8997 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7773 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7773 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7358 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7358 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7261 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7261 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       8914 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       8914 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7709 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7709 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7284 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7284 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7190 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7190 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select distinct 7 as seven, INPUT__FILE__NAME from over10k_orc_bucketed_n0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over10k_orc_bucketed_n0
@@ -680,22 +680,22 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: over10k_orc_bucketed_n0
-                  Statistics: Num rows: 1247 Data size: 713720 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1237 Data size: 707880 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                     outputColumnNames: ROW__ID
-                    Statistics: Num rows: 1247 Data size: 713720 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1237 Data size: 707880 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       keys: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                       mode: hash
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 623 Data size: 52332 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                        Statistics: Num rows: 623 Data size: 52332 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint)
         Reducer 2 
             Reduce Operator Tree:
@@ -704,13 +704,13 @@ STAGE PLANS:
                 keys: KEY._col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 623 Data size: 52332 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
                 Filter Operator
                   predicate: (_col1 > 1L) (type: boolean)
-                  Statistics: Num rows: 207 Data size: 17388 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 207 Data size: 17388 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/dd512593/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
index a6eb4e5..dee97db 100644
--- a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
@@ -623,11 +623,11 @@ Stage-0
                 TableScan [TS_0] (rows=500/500 width=178)
                   default@src,src,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
 
-PREHOOK: query: create table orc_merge5_n1 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+PREHOOK: query: create table orc_merge5_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@orc_merge5_n1
-POSTHOOK: query: create table orc_merge5_n1 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+POSTHOOK: query: create table orc_merge5_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@orc_merge5_n1

http://git-wip-us.apache.org/repos/asf/hive/blob/dd512593/ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out b/ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out
index fa72556..130029c 100644
--- a/ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out
+++ b/ql/src/test/results/clientpositive/tez/vector_non_string_partition.q.out
@@ -83,8 +83,8 @@ STAGE PLANS:
             Map Vectorization:
                 enabled: true
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                inputFormatFeatureSupport: []
-                featureSupportInUse: []
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
                 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                 allNative: true
                 usesVectorUDFAdaptor: false
@@ -204,8 +204,8 @@ STAGE PLANS:
             Map Vectorization:
                 enabled: true
                 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                inputFormatFeatureSupport: []
-                featureSupportInUse: []
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
                 inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                 allNative: true
                 usesVectorUDFAdaptor: false

http://git-wip-us.apache.org/repos/asf/hive/blob/dd512593/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_aggregate_9.q.out b/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
index fe7fbfd..9487881 100644
--- a/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
+++ b/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
@@ -158,8 +158,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -264,8 +264,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -370,8 +370,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false

http://git-wip-us.apache.org/repos/asf/hive/blob/dd512593/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out b/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
index 4d2b0dc..19641ee 100644
--- a/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
+++ b/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out
@@ -97,8 +97,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false

http://git-wip-us.apache.org/repos/asf/hive/blob/dd512593/ql/src/test/results/clientpositive/vector_between_columns.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_between_columns.q.out b/ql/src/test/results/clientpositive/vector_between_columns.q.out
index 7967df1..f5e095b 100644
--- a/ql/src/test/results/clientpositive/vector_between_columns.q.out
+++ b/ql/src/test/results/clientpositive/vector_between_columns.q.out
@@ -173,8 +173,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: true
@@ -333,8 +333,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: true

http://git-wip-us.apache.org/repos/asf/hive/blob/dd512593/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
index b66c0b0..ac9ef5c 100644
--- a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
+++ b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
@@ -170,7 +170,7 @@ STAGE PLANS:
                     1 _col10 (type: binary)
                   Map Join Vectorization:
                       bigTableKeyExpressions: col 10:binary
-                      bigTableValueExpressions: col 0:tinyint, col 1:smallint, col 2:int, col 3:bigint, col 4:float, col 5:double, col 6:boolean, col 7:string, col 8:timestamp, col 9:decimal(4,2), col 10:binary
+                      bigTableValueExpressions: col 0:tinyint, col 1:smallint, col 2:int, col 3:bigint, col 4:float, col 5:double, col 6:boolean, col 7:string, col 8:timestamp, ConvertDecimal64ToDecimal(col 9:decimal(4,2)/DECIMAL_64) -> 12:decimal(4,2), col 10:binary
                       className: VectorMapJoinOperator
                       native: false
                       nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
@@ -211,8 +211,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: true
@@ -364,8 +364,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
@@ -582,8 +582,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false

http://git-wip-us.apache.org/repos/asf/hive/blob/dd512593/ql/src/test/results/clientpositive/vector_bround.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_bround.q.out b/ql/src/test/results/clientpositive/vector_bround.q.out
index 644902b..68086c2 100644
--- a/ql/src/test/results/clientpositive/vector_bround.q.out
+++ b/ql/src/test/results/clientpositive/vector_bround.q.out
@@ -91,8 +91,8 @@ STAGE PLANS:
       Map Vectorization:
           enabled: true
           enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-          inputFormatFeatureSupport: []
-          featureSupportInUse: []
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
           inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           allNative: false
           usesVectorUDFAdaptor: false
