http://git-wip-us.apache.org/repos/asf/hive/blob/1f258e96/ql/src/test/results/clientpositive/llap/rcfile_createas1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/rcfile_createas1.q.out b/ql/src/test/results/clientpositive/llap/rcfile_createas1.q.out
new file mode 100644
index 0000000..7d1f526
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/rcfile_createas1.q.out
@@ -0,0 +1,180 @@
+PREHOOK: query: DROP TABLE rcfile_createas1a
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE rcfile_createas1a
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE rcfile_createas1b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE rcfile_createas1b
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE rcfile_createas1a (key INT, value STRING)
+    PARTITIONED BY (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcfile_createas1a
+POSTHOOK: query: CREATE TABLE rcfile_createas1a (key INT, value STRING)
+    PARTITIONED BY (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@rcfile_createas1a
+PREHOOK: query: INSERT OVERWRITE TABLE rcfile_createas1a PARTITION (ds='1')
+    SELECT * FROM src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@rcfile_createas1a@ds=1
+POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_createas1a PARTITION (ds='1')
+    SELECT * FROM src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@rcfile_createas1a@ds=1
+POSTHOOK: Lineage: rcfile_createas1a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_createas1a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: INSERT OVERWRITE TABLE rcfile_createas1a PARTITION (ds='2')
+    SELECT * FROM src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@rcfile_createas1a@ds=2
+POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_createas1a PARTITION (ds='2')
+    SELECT * FROM src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@rcfile_createas1a@ds=2
+POSTHOOK: Lineage: rcfile_createas1a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_createas1a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: EXPLAIN
+    CREATE TABLE rcfile_createas1b
+    STORED AS RCFILE AS 
+        SELECT key, value, PMOD(HASH(key), 50) as part
+        FROM rcfile_createas1a
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: EXPLAIN
+    CREATE TABLE rcfile_createas1b
+    STORED AS RCFILE AS 
+        SELECT key, value, PMOD(HASH(key), 50) as part
+        FROM rcfile_createas1a
+POSTHOOK: type: CREATETABLE_AS_SELECT
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-4 depends on stages: Stage-2, Stage-0
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: rcfile_createas1a
+                  Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: string), (hash(key) pmod 50) (type: int)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                          name: default.rcfile_createas1b
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-4
+      Create Table Operator:
+        Create Table
+          columns: key int, value string, part int
+          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          name: default.rcfile_createas1b
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+  Stage: Stage-0
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: CREATE TABLE rcfile_createas1b
+    STORED AS RCFILE AS 
+        SELECT key, value, PMOD(HASH(key), 50) as part
+        FROM rcfile_createas1a
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@rcfile_createas1a
+PREHOOK: Input: default@rcfile_createas1a@ds=1
+PREHOOK: Input: default@rcfile_createas1a@ds=2
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcfile_createas1b
+POSTHOOK: query: CREATE TABLE rcfile_createas1b
+    STORED AS RCFILE AS 
+        SELECT key, value, PMOD(HASH(key), 50) as part
+        FROM rcfile_createas1a
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@rcfile_createas1a
+POSTHOOK: Input: default@rcfile_createas1a@ds=1
+POSTHOOK: Input: default@rcfile_createas1a@ds=2
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@rcfile_createas1b
+POSTHOOK: Lineage: rcfile_createas1b.key SIMPLE [(rcfile_createas1a)rcfile_createas1a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: rcfile_createas1b.part EXPRESSION [(rcfile_createas1a)rcfile_createas1a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: rcfile_createas1b.value SIMPLE [(rcfile_createas1a)rcfile_createas1a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
+    FROM rcfile_createas1a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@rcfile_createas1a
+PREHOOK: Input: default@rcfile_createas1a@ds=1
+PREHOOK: Input: default@rcfile_createas1a@ds=2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
+    FROM rcfile_createas1a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@rcfile_createas1a
+POSTHOOK: Input: default@rcfile_createas1a@ds=1
+POSTHOOK: Input: default@rcfile_createas1a@ds=2
+#### A masked pattern was here ####
+14412220296
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
+    FROM rcfile_createas1b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@rcfile_createas1b
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
+    FROM rcfile_createas1b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@rcfile_createas1b
+#### A masked pattern was here ####
+14412220296
+PREHOOK: query: DROP TABLE rcfile_createas1a
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@rcfile_createas1a
+PREHOOK: Output: default@rcfile_createas1a
+POSTHOOK: query: DROP TABLE rcfile_createas1a
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@rcfile_createas1a
+POSTHOOK: Output: default@rcfile_createas1a
+PREHOOK: query: DROP TABLE rcfile_createas1b
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@rcfile_createas1b
+PREHOOK: Output: default@rcfile_createas1b
+POSTHOOK: query: DROP TABLE rcfile_createas1b
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@rcfile_createas1b
+POSTHOOK: Output: default@rcfile_createas1b

http://git-wip-us.apache.org/repos/asf/hive/blob/1f258e96/ql/src/test/results/clientpositive/llap/rcfile_merge2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/rcfile_merge2.q.out b/ql/src/test/results/clientpositive/llap/rcfile_merge2.q.out
new file mode 100644
index 0000000..2bb8e8a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/rcfile_merge2.q.out
@@ -0,0 +1,230 @@
+PREHOOK: query: DROP TABLE rcfile_merge2a
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE rcfile_merge2a
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE rcfile_merge2a (key INT, value STRING)
+    PARTITIONED BY (one string, two string, three string)
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcfile_merge2a
+POSTHOOK: query: CREATE TABLE rcfile_merge2a (key INT, value STRING)
+    PARTITIONED BY (one string, two string, three string)
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@rcfile_merge2a
+PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE rcfile_merge2a PARTITION (one='1', two, three)
+    SELECT key, value, PMOD(HASH(key), 10) as two, 
+        PMOD(HASH(value), 10) as three
+    FROM src
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT OVERWRITE TABLE rcfile_merge2a PARTITION (one='1', two, three)
+    SELECT key, value, PMOD(HASH(key), 10) as two, 
+        PMOD(HASH(value), 10) as three
+    FROM src
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: UDFToInteger(key) (type: int), value (type: string), (hash(key) pmod 10) (type: int), (hash(value) pmod 10) (type: int)
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                          name: default.rcfile_merge2a
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            one 1
+            three 
+            two 
+          replace: true
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.rcfile_merge2a
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: INSERT OVERWRITE TABLE rcfile_merge2a PARTITION (one='1', two, three)
+    SELECT key, value, PMOD(HASH(key), 10) as two, 
+        PMOD(HASH(value), 10) as three
+    FROM src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@rcfile_merge2a@one=1
+POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_merge2a PARTITION (one='1', two, three)
+    SELECT key, value, PMOD(HASH(key), 10) as two, 
+        PMOD(HASH(value), 10) as three
+    FROM src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=0/three=2
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=0/three=8
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=1/three=3
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=1/three=9
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=2/three=0
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=2/three=4
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=3/three=1
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=3/three=5
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=4/three=2
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=4/three=6
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=5/three=3
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=5/three=7
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=6/three=4
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=6/three=8
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=7/three=5
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=7/three=9
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=8/three=0
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=8/three=6
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=9/three=1
+POSTHOOK: Output: default@rcfile_merge2a@one=1/two=9/three=7
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=0,three=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=0,three=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=0,three=8).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=0,three=8).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=1,three=3).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=1,three=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=1,three=9).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=1,three=9).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=2,three=0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=2,three=0).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=2,three=4).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=2,three=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=3,three=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=3,three=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=3,three=5).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=3,three=5).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=4,three=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=4,three=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=4,three=6).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=4,three=6).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=5,three=3).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=5,three=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=5,three=7).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=5,three=7).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=6,three=4).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=6,three=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=6,three=8).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=6,three=8).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=7,three=5).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=7,three=5).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=7,three=9).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=7,three=9).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=8,three=0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=8,three=0).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=8,three=6).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=8,three=6).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=9,three=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=9,three=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=9,three=7).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge2a PARTITION(one=1,two=9,three=7).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c)
+    FROM rcfile_merge2a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@rcfile_merge2a
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=0/three=2
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=0/three=8
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=1/three=3
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=1/three=9
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=2/three=0
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=2/three=4
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=3/three=1
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=3/three=5
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=4/three=2
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=4/three=6
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=5/three=3
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=5/three=7
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=6/three=4
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=6/three=8
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=7/three=5
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=7/three=9
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=8/three=0
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=8/three=6
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=9/three=1
+PREHOOK: Input: default@rcfile_merge2a@one=1/two=9/three=7
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c)
+    FROM rcfile_merge2a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@rcfile_merge2a
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=0/three=2
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=0/three=8
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=1/three=3
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=1/three=9
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=2/three=0
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=2/three=4
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=3/three=1
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=3/three=5
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=4/three=2
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=4/three=6
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=5/three=3
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=5/three=7
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=6/three=4
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=6/three=8
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=7/three=5
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=7/three=9
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=8/three=0
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=8/three=6
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=9/three=1
+POSTHOOK: Input: default@rcfile_merge2a@one=1/two=9/three=7
+#### A masked pattern was here ####
+-4209012844
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value, '1', PMOD(HASH(key), 10), 
+        PMOD(HASH(value), 10)) USING 'tr \t _' AS (c)
+    FROM src
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value, '1', PMOD(HASH(key), 10), 
+        PMOD(HASH(value), 10)) USING 'tr \t _' AS (c)
+    FROM src
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+-4209012844
+PREHOOK: query: DROP TABLE rcfile_merge2a
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@rcfile_merge2a
+PREHOOK: Output: default@rcfile_merge2a
+POSTHOOK: query: DROP TABLE rcfile_merge2a
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@rcfile_merge2a
+POSTHOOK: Output: default@rcfile_merge2a

http://git-wip-us.apache.org/repos/asf/hive/blob/1f258e96/ql/src/test/results/clientpositive/llap/rcfile_merge3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/rcfile_merge3.q.out b/ql/src/test/results/clientpositive/llap/rcfile_merge3.q.out
new file mode 100644
index 0000000..11b0b48
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/rcfile_merge3.q.out
@@ -0,0 +1,169 @@
+PREHOOK: query: DROP TABLE rcfile_merge3a
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE rcfile_merge3a
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE rcfile_merge3b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE rcfile_merge3b
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE rcfile_merge3a (key int, value string) 
+    PARTITIONED BY (ds string) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcfile_merge3a
+POSTHOOK: query: CREATE TABLE rcfile_merge3a (key int, value string) 
+    PARTITIONED BY (ds string) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@rcfile_merge3a
+PREHOOK: query: CREATE TABLE rcfile_merge3b (key int, value string) STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcfile_merge3b
+POSTHOOK: query: CREATE TABLE rcfile_merge3b (key int, value string) STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@rcfile_merge3b
+PREHOOK: query: INSERT OVERWRITE TABLE rcfile_merge3a PARTITION (ds='1')
+    SELECT * FROM src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@rcfile_merge3a@ds=1
+POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_merge3a PARTITION (ds='1')
+    SELECT * FROM src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@rcfile_merge3a@ds=1
+POSTHOOK: Lineage: rcfile_merge3a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge3a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: INSERT OVERWRITE TABLE rcfile_merge3a PARTITION (ds='2')
+    SELECT * FROM src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@rcfile_merge3a@ds=2
+POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_merge3a PARTITION (ds='2')
+    SELECT * FROM src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@rcfile_merge3a@ds=2
+POSTHOOK: Lineage: rcfile_merge3a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge3a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE rcfile_merge3b
+    SELECT key, value FROM rcfile_merge3a
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT OVERWRITE TABLE rcfile_merge3b
+    SELECT key, value FROM rcfile_merge3a
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: rcfile_merge3a
+                  Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                          name: default.rcfile_merge3b
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.rcfile_merge3b
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: INSERT OVERWRITE TABLE rcfile_merge3b
+    SELECT key, value FROM rcfile_merge3a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@rcfile_merge3a
+PREHOOK: Input: default@rcfile_merge3a@ds=1
+PREHOOK: Input: default@rcfile_merge3a@ds=2
+PREHOOK: Output: default@rcfile_merge3b
+POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_merge3b
+    SELECT key, value FROM rcfile_merge3a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@rcfile_merge3a
+POSTHOOK: Input: default@rcfile_merge3a@ds=1
+POSTHOOK: Input: default@rcfile_merge3a@ds=2
+POSTHOOK: Output: default@rcfile_merge3b
+POSTHOOK: Lineage: rcfile_merge3b.key SIMPLE [(rcfile_merge3a)rcfile_merge3a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: rcfile_merge3b.value SIMPLE [(rcfile_merge3a)rcfile_merge3a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
+    FROM rcfile_merge3a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@rcfile_merge3a
+PREHOOK: Input: default@rcfile_merge3a@ds=1
+PREHOOK: Input: default@rcfile_merge3a@ds=2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
+    FROM rcfile_merge3a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@rcfile_merge3a
+POSTHOOK: Input: default@rcfile_merge3a@ds=1
+POSTHOOK: Input: default@rcfile_merge3a@ds=2
+#### A masked pattern was here ####
+14412220296
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
+    FROM rcfile_merge3b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@rcfile_merge3b
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
+    FROM rcfile_merge3b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@rcfile_merge3b
+#### A masked pattern was here ####
+14412220296
+PREHOOK: query: DROP TABLE rcfile_merge3a
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@rcfile_merge3a
+PREHOOK: Output: default@rcfile_merge3a
+POSTHOOK: query: DROP TABLE rcfile_merge3a
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@rcfile_merge3a
+POSTHOOK: Output: default@rcfile_merge3a
+PREHOOK: query: DROP TABLE rcfile_merge3b
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@rcfile_merge3b
+PREHOOK: Output: default@rcfile_merge3b
+POSTHOOK: query: DROP TABLE rcfile_merge3b
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@rcfile_merge3b
+POSTHOOK: Output: default@rcfile_merge3b

http://git-wip-us.apache.org/repos/asf/hive/blob/1f258e96/ql/src/test/results/clientpositive/llap/rcfile_merge4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/rcfile_merge4.q.out b/ql/src/test/results/clientpositive/llap/rcfile_merge4.q.out
new file mode 100644
index 0000000..443014b
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/rcfile_merge4.q.out
@@ -0,0 +1,169 @@
+PREHOOK: query: DROP TABLE rcfile_merge3a
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE rcfile_merge3a
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE rcfile_merge3b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE rcfile_merge3b
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE rcfile_merge3a (key int, value string) 
+    PARTITIONED BY (ds string) STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcfile_merge3a
+POSTHOOK: query: CREATE TABLE rcfile_merge3a (key int, value string) 
+    PARTITIONED BY (ds string) STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@rcfile_merge3a
+PREHOOK: query: CREATE TABLE rcfile_merge3b (key int, value string) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcfile_merge3b
+POSTHOOK: query: CREATE TABLE rcfile_merge3b (key int, value string) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@rcfile_merge3b
+PREHOOK: query: INSERT OVERWRITE TABLE rcfile_merge3a PARTITION (ds='1')
+    SELECT * FROM src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@rcfile_merge3a@ds=1
+POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_merge3a PARTITION (ds='1')
+    SELECT * FROM src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@rcfile_merge3a@ds=1
+POSTHOOK: Lineage: rcfile_merge3a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge3a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: INSERT OVERWRITE TABLE rcfile_merge3a PARTITION (ds='2')
+    SELECT * FROM src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@rcfile_merge3a@ds=2
+POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_merge3a PARTITION (ds='2')
+    SELECT * FROM src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@rcfile_merge3a@ds=2
+POSTHOOK: Lineage: rcfile_merge3a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_merge3a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE rcfile_merge3b
+    SELECT key, value FROM rcfile_merge3a
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT OVERWRITE TABLE rcfile_merge3b
+    SELECT key, value FROM rcfile_merge3a
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: rcfile_merge3a
+                  Statistics: Num rows: 1000 Data size: 17624 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1000 Data size: 17624 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 1000 Data size: 17624 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.rcfile_merge3b
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.rcfile_merge3b
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: INSERT OVERWRITE TABLE rcfile_merge3b
+    SELECT key, value FROM rcfile_merge3a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@rcfile_merge3a
+PREHOOK: Input: default@rcfile_merge3a@ds=1
+PREHOOK: Input: default@rcfile_merge3a@ds=2
+PREHOOK: Output: default@rcfile_merge3b
+POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_merge3b
+    SELECT key, value FROM rcfile_merge3a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@rcfile_merge3a
+POSTHOOK: Input: default@rcfile_merge3a@ds=1
+POSTHOOK: Input: default@rcfile_merge3a@ds=2
+POSTHOOK: Output: default@rcfile_merge3b
+POSTHOOK: Lineage: rcfile_merge3b.key SIMPLE [(rcfile_merge3a)rcfile_merge3a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: rcfile_merge3b.value SIMPLE [(rcfile_merge3a)rcfile_merge3a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
+    FROM rcfile_merge3a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@rcfile_merge3a
+PREHOOK: Input: default@rcfile_merge3a@ds=1
+PREHOOK: Input: default@rcfile_merge3a@ds=2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
+    FROM rcfile_merge3a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@rcfile_merge3a
+POSTHOOK: Input: default@rcfile_merge3a@ds=1
+POSTHOOK: Input: default@rcfile_merge3a@ds=2
+#### A masked pattern was here ####
+14412220296
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
+    FROM rcfile_merge3b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@rcfile_merge3b
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
+    FROM rcfile_merge3b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@rcfile_merge3b
+#### A masked pattern was here ####
+14412220296
+PREHOOK: query: DROP TABLE rcfile_merge3a
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@rcfile_merge3a
+PREHOOK: Output: default@rcfile_merge3a
+POSTHOOK: query: DROP TABLE rcfile_merge3a
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@rcfile_merge3a
+POSTHOOK: Output: default@rcfile_merge3a
+PREHOOK: query: DROP TABLE rcfile_merge3b
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@rcfile_merge3b
+PREHOOK: Output: default@rcfile_merge3b
+POSTHOOK: query: DROP TABLE rcfile_merge3b
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@rcfile_merge3b
+POSTHOOK: Output: default@rcfile_merge3b

http://git-wip-us.apache.org/repos/asf/hive/blob/1f258e96/ql/src/test/results/clientpositive/llap/sample10.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/sample10.q.out b/ql/src/test/results/clientpositive/llap/sample10.q.out
new file mode 100644
index 0000000..3634b81
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/sample10.q.out
@@ -0,0 +1,437 @@
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
+
+create table srcpartbucket (key string, value string) partitioned by (ds string, hr string) clustered by (key) into 4 buckets
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@srcpartbucket
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
+
+create table srcpartbucket (key string, value string) partitioned by (ds string, hr string) clustered by (key) into 4 buckets
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@srcpartbucket
+PREHOOK: query: insert overwrite table srcpartbucket partition(ds, hr) select * from srcpart where ds is not null and key < 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+PREHOOK: Output: default@srcpartbucket
+POSTHOOK: query: insert overwrite table srcpartbucket partition(ds, hr) select * from srcpart where ds is not null and key < 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+POSTHOOK: Output: default@srcpartbucket@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@srcpartbucket@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@srcpartbucket@ds=2008-04-09/hr=11
+POSTHOOK: Output: default@srcpartbucket@ds=2008-04-09/hr=12
+POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: explain extended
+select ds, count(1) from srcpartbucket tablesample (bucket 1 out of 4 on key) where ds is not null group by ds ORDER BY ds ASC
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select ds, count(1) from srcpartbucket tablesample (bucket 1 out of 4 on key) where ds is not null group by ds ORDER BY ds ASC
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: srcpartbucket
+                  Statistics: Num rows: 40 Data size: 7600 Basic stats: COMPLETE Column stats: PARTIAL
+                  GatherStats: false
+                  Filter Operator
+                    isSamplingPred: true
+                    predicate: (((hash(key) & 2147483647) % 4) = 0) (type: boolean)
+                    sampleDesc: BUCKET 1 OUT OF 4
+                    Statistics: Num rows: 20 Data size: 3680 Basic stats: COMPLETE Column stats: PARTIAL
+                    Select Operator
+                      expressions: ds (type: string)
+                      outputColumnNames: ds
+                      Statistics: Num rows: 20 Data size: 3680 Basic stats: COMPLETE Column stats: PARTIAL
+                      Group By Operator
+                        aggregations: count(1)
+                        keys: ds (type: string)
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: PARTIAL
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          null sort order: a
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: PARTIAL
+                          tag: -1
+                          value expressions: _col1 (type: bigint)
+                          auto parallelism: true
+            Execution mode: llap
+            LLAP IO: no inputs
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: 000000_0
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                    hr 11
+                  properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.comments 
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.srcpartbucket
+                    numFiles 3
+                    numRows 10
+                    partition_columns ds/hr
+                    partition_columns.types string:string
+                    rawDataSize 60
+                    serialization.ddl struct srcpartbucket { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    totalSize 295
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.srcpartbucket
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct srcpartbucket { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.srcpartbucket
+                  name: default.srcpartbucket
+#### A masked pattern was here ####
+                Partition
+                  base file name: 000000_0
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                    hr 12
+                  properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.comments 
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.srcpartbucket
+                    numFiles 3
+                    numRows 10
+                    partition_columns ds/hr
+                    partition_columns.types string:string
+                    rawDataSize 60
+                    serialization.ddl struct srcpartbucket { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    totalSize 295
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.srcpartbucket
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct srcpartbucket { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.srcpartbucket
+                  name: default.srcpartbucket
+#### A masked pattern was here ####
+                Partition
+                  base file name: 000000_0
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  partition values:
+                    ds 2008-04-09
+                    hr 11
+                  properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.comments 
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.srcpartbucket
+                    numFiles 3
+                    numRows 10
+                    partition_columns ds/hr
+                    partition_columns.types string:string
+                    rawDataSize 60
+                    serialization.ddl struct srcpartbucket { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    totalSize 295
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.srcpartbucket
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct srcpartbucket { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.srcpartbucket
+                  name: default.srcpartbucket
+#### A masked pattern was here ####
+                Partition
+                  base file name: 000000_0
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  partition values:
+                    ds 2008-04-09
+                    hr 12
+                  properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.comments 
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.srcpartbucket
+                    numFiles 3
+                    numRows 10
+                    partition_columns ds/hr
+                    partition_columns.types string:string
+                    rawDataSize 60
+                    serialization.ddl struct srcpartbucket { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    totalSize 295
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.srcpartbucket
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct srcpartbucket { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.srcpartbucket
+                  name: default.srcpartbucket
+            Truncated Path -> Alias:
+              /srcpartbucket/ds=2008-04-08/hr=11/000000_0 [srcpartbucket]
+              /srcpartbucket/ds=2008-04-08/hr=12/000000_0 [srcpartbucket]
+              /srcpartbucket/ds=2008-04-09/hr=11/000000_0 [srcpartbucket]
+              /srcpartbucket/ds=2008-04-09/hr=12/000000_0 [srcpartbucket]
+        Reducer 2 
+            Execution mode: llap
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: PARTIAL
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  null sort order: a
+                  sort order: +
+                  Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: PARTIAL
+                  tag: -1
+                  value expressions: _col1 (type: bigint)
+                  auto parallelism: false
+        Reducer 3 
+            Execution mode: llap
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: bigint)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: PARTIAL
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+                  Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: PARTIAL
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      properties:
+                        columns _col0,_col1
+                        columns.types string:bigint
+                        escape.delim \
+                        hive.serialization.extend.additional.nesting.levels true
+                        serialization.escape.crlf true
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select ds, count(1) from srcpartbucket tablesample (bucket 1 out of 4 on key) where ds is not null group by ds ORDER BY ds ASC
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpartbucket
+PREHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select ds, count(1) from srcpartbucket tablesample (bucket 1 out of 4 on key) where ds is not null group by ds ORDER BY ds ASC
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpartbucket
+POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+2008-04-08     10
+2008-04-09     10
+PREHOOK: query: select ds, count(1) from srcpartbucket tablesample (bucket 1 out of 2 on key) where ds is not null group by ds ORDER BY ds ASC
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpartbucket
+PREHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select ds, count(1) from srcpartbucket tablesample (bucket 1 out of 2 on key) where ds is not null group by ds ORDER BY ds ASC
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpartbucket
+POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+2008-04-08     12
+2008-04-09     12
+PREHOOK: query: select * from srcpartbucket where ds is not null ORDER BY key ASC, value ASC, ds ASC, hr ASC
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpartbucket
+PREHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from srcpartbucket where ds is not null ORDER BY key ASC, value ASC, ds ASC, hr ASC
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpartbucket
+POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+0      val_0   2008-04-08      11
+0      val_0   2008-04-08      11
+0      val_0   2008-04-08      11
+0      val_0   2008-04-08      12
+0      val_0   2008-04-08      12
+0      val_0   2008-04-08      12
+0      val_0   2008-04-09      11
+0      val_0   2008-04-09      11
+0      val_0   2008-04-09      11
+0      val_0   2008-04-09      12
+0      val_0   2008-04-09      12
+0      val_0   2008-04-09      12
+2      val_2   2008-04-08      11
+2      val_2   2008-04-08      12
+2      val_2   2008-04-09      11
+2      val_2   2008-04-09      12
+4      val_4   2008-04-08      11
+4      val_4   2008-04-08      12
+4      val_4   2008-04-09      11
+4      val_4   2008-04-09      12
+5      val_5   2008-04-08      11
+5      val_5   2008-04-08      11
+5      val_5   2008-04-08      11
+5      val_5   2008-04-08      12
+5      val_5   2008-04-08      12
+5      val_5   2008-04-08      12
+5      val_5   2008-04-09      11
+5      val_5   2008-04-09      11
+5      val_5   2008-04-09      11
+5      val_5   2008-04-09      12
+5      val_5   2008-04-09      12
+5      val_5   2008-04-09      12
+8      val_8   2008-04-08      11
+8      val_8   2008-04-08      12
+8      val_8   2008-04-09      11
+8      val_8   2008-04-09      12
+9      val_9   2008-04-08      11
+9      val_9   2008-04-08      12
+9      val_9   2008-04-09      11
+9      val_9   2008-04-09      12
