Author: jvs
Date: Fri Sep 2 03:27:17 2011
New Revision: 1164340
URL: http://svn.apache.org/viewvc?rev=1164340&view=rev
Log:
HIVE-2337. Predicate pushdown erroneously conservative with outer joins
(Charles Chen via jvs)
Added:
hive/trunk/ql/src/test/queries/clientpositive/ppd_outer_join5.q
hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join5.q.out
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join4.q.out
hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=1164340&r1=1164339&r2=1164340&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Fri
Sep 2 03:27:17 2011
@@ -270,18 +270,21 @@ public final class OpProcFactory {
/**
* Figures out the aliases for whom it is safe to push predicates based on
- * ANSI SQL semantics For inner join, all predicates for all aliases can be
- * pushed For full outer join, none of the predicates can be pushed as that
- * would limit the number of rows for join For left outer join, all the
- * predicates on the left side aliases can be pushed up For right outer
- * join, all the predicates on the right side aliases can be pushed up Joins
- * chain containing both left and right outer joins are treated as full
- * outer join. TODO: further optimization opportunity for the case a.c1 =
- * b.c1 and b.c2 = c.c2 a and b are first joined and then the result with c.
- * But the second join op currently treats a and b as separate aliases and
- * thus disallowing predicate expr containing both tables a and b (such as
- * a.c3 + a.c4 > 20). Such predicates also can be pushed just above the
- * second join and below the first join
+ * ANSI SQL semantics. The join conditions are left associative, so "a
+ * RIGHT OUTER JOIN b LEFT OUTER JOIN c INNER JOIN d" is interpreted as
+ * "((a RIGHT OUTER JOIN b) LEFT OUTER JOIN c) INNER JOIN d". For an inner
+ * join, both the left and right join subexpressions are considered for
+ * pushing down aliases; for a right outer join, only the right
+ * subexpression is considered (the left is ignored); and for a left outer
+ * join, only the left subexpression is considered (the right is ignored).
+ * In the example above, aliases b and d are eligible for pushdown.
+ *
+ * TODO: further optimization opportunity for the case a.c1 = b.c1 and
+ * b.c2 = c.c2, where a and b are joined first and the result is then
+ * joined with c. The second join op currently treats a and b as separate
+ * aliases and thus disallows predicate exprs that reference both tables a
+ * and b (such as a.c3 + a.c4 > 20). Such predicates could also be pushed
+ * just above the second join and below the first join.
*
* @param op
* Join Operator
@@ -291,40 +294,23 @@ public final class OpProcFactory {
*/
private Set<String> getQualifiedAliases(JoinOperator op, RowResolver rr) {
Set<String> aliases = new HashSet<String>();
- int loj = Integer.MAX_VALUE;
- int roj = -1;
- boolean oj = false;
JoinCondDesc[] conds = op.getConf().getConds();
Map<Integer, Set<String>> posToAliasMap = op.getPosToAliasMap();
- for (JoinCondDesc jc : conds) {
- if (jc.getType() == JoinDesc.FULL_OUTER_JOIN) {
- oj = true;
+ int i;
+ for (i=conds.length-1; i>=0; i--){
+ if (conds[i].getType() == JoinDesc.INNER_JOIN) {
+ aliases.addAll(posToAliasMap.get(i+1));
+ } else if (conds[i].getType() == JoinDesc.FULL_OUTER_JOIN) {
break;
- } else if (jc.getType() == JoinDesc.LEFT_OUTER_JOIN) {
- if (jc.getLeft() < loj) {
- loj = jc.getLeft();
- }
- } else if (jc.getType() == JoinDesc.RIGHT_OUTER_JOIN) {
- if (jc.getRight() > roj) {
- roj = jc.getRight();
- }
+ } else if (conds[i].getType() == JoinDesc.RIGHT_OUTER_JOIN) {
+ aliases.addAll(posToAliasMap.get(i+1));
+ break;
+ } else if (conds[i].getType() == JoinDesc.LEFT_OUTER_JOIN) {
+ continue;
}
}
- if (oj || (loj != Integer.MAX_VALUE && roj != -1)) {
- return aliases;
- }
- for (Entry<Integer, Set<String>> pa : posToAliasMap.entrySet()) {
- if (loj != Integer.MAX_VALUE) {
- if (pa.getKey() <= loj) {
- aliases.addAll(pa.getValue());
- }
- } else if (roj != -1) {
- if (pa.getKey() >= roj) {
- aliases.addAll(pa.getValue());
- }
- } else {
- aliases.addAll(pa.getValue());
- }
+ if(i == -1){
+ aliases.addAll(posToAliasMap.get(0));
}
Set<String> aliases2 = rr.getTableNames();
aliases.retainAll(aliases2);
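
For readers skimming the patch, here is a minimal standalone sketch of the control flow that replaces the old loj/roj bookkeeping above. The class and method names (QualifiedAliasSketch, qualifiedAliases) and the plain int/Map stand-ins for JoinCondDesc, JoinDesc and the posToAliasMap are illustrative only and are not part of the Hive sources; only the right-to-left scan over the join conditions mirrors the patch.

import java.util.*;

// Standalone sketch of the new getQualifiedAliases() scan: walk the join
// conditions from right to left and collect the aliases whose predicates
// are safe to push below the join.
public class QualifiedAliasSketch {
  static final int INNER = 0, LEFT_OUTER = 1, RIGHT_OUTER = 2, FULL_OUTER = 3;

  // condTypes[i] is the type of the join between positions i and i+1;
  // posToAliases maps a join position to the table aliases at that position.
  static Set<String> qualifiedAliases(int[] condTypes,
                                      Map<Integer, Set<String>> posToAliases) {
    Set<String> aliases = new HashSet<>();
    int i;
    for (i = condTypes.length - 1; i >= 0; i--) {
      if (condTypes[i] == INNER) {
        aliases.addAll(posToAliases.get(i + 1)); // right side safe; keep scanning left
      } else if (condTypes[i] == FULL_OUTER) {
        break;                                   // nothing further left can be pushed
      } else if (condTypes[i] == RIGHT_OUTER) {
        aliases.addAll(posToAliases.get(i + 1)); // right side preserved; stop here
        break;
      }                                          // LEFT_OUTER: skip right side, keep scanning
    }
    if (i == -1) {
      aliases.addAll(posToAliases.get(0));       // scan reached the leftmost table
    }
    return aliases;
  }

  public static void main(String[] args) {
    // ((a RIGHT OUTER JOIN b) LEFT OUTER JOIN c) INNER JOIN d -- the javadoc example
    Map<Integer, Set<String>> pos = new HashMap<>();
    pos.put(0, Collections.singleton("a"));
    pos.put(1, Collections.singleton("b"));
    pos.put(2, Collections.singleton("c"));
    pos.put(3, Collections.singleton("d"));
    System.out.println(qualifiedAliases(
        new int[]{RIGHT_OUTER, LEFT_OUTER, INNER}, pos)); // prints b and d (in some order)
  }
}

Running main yields the aliases b and d for the javadoc example, matching the aliases the new comment describes as eligible for pushdown.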
Added: hive/trunk/ql/src/test/queries/clientpositive/ppd_outer_join5.q
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/ppd_outer_join5.q?rev=1164340&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/ppd_outer_join5.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/ppd_outer_join5.q Fri Sep 2
03:27:17 2011
@@ -0,0 +1,16 @@
+set hive.optimize.ppd=true;
+set hive.ppd.remove.duplicatefilters=true;
+
+create table t1 (id int, key string, value string);
+create table t2 (id int, key string, value string);
+create table t3 (id int, key string, value string);
+create table t4 (id int, key string, value string);
+
+explain select * from t1 full outer join t2 on t1.id=t2.id join t3 on
t2.id=t3.id where t3.id=20;
+explain select * from t1 join t2 on (t1.id=t2.id) left outer join t3 on
(t2.id=t3.id) where t2.id=20;
+explain select * from t1 join t2 on (t1.id=t2.id) left outer join t3 on
(t1.id=t3.id) where t2.id=20;
+
+drop table t1;
+drop table t2;
+drop table t3;
+drop table t4;
\ No newline at end of file
Modified: hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join4.q.out
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join4.q.out?rev=1164340&r1=1164339&r2=1164340&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join4.q.out
(original)
+++ hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join4.q.out Fri Sep
2 03:27:17 2011
@@ -70,18 +70,22 @@ STAGE PLANS:
c
TableScan
alias: c
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 2
- value expressions:
- expr: key
- type: string
+ Filter Operator
+ predicate:
+ expr: (sqrt(key) <> 13)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 2
+ value expressions:
+ expr: key
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -134,7 +138,7 @@ PREHOOK: query: FROM
WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND
sqrt(c.key) <> 13
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output:
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-38-11_041_8830294243573092446/-mr-10000
+PREHOOK: Output:
file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-31_17-04-03_652_7389043450109466394/-mr-10000
POSTHOOK: query: FROM
src a
LEFT OUTER JOIN
@@ -147,7 +151,7 @@ POSTHOOK: query: FROM
WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND
sqrt(c.key) <> 13
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output:
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-38-11_041_8830294243573092446/-mr-10000
+POSTHOOK: Output:
file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-31_17-04-03_652_7389043450109466394/-mr-10000
150 val_150 150 val_150 150
152 val_152 152 val_152 152
152 val_152 152 val_152 152
@@ -450,18 +454,22 @@ STAGE PLANS:
c
TableScan
alias: c
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 2
- value expressions:
- expr: key
- type: string
+ Filter Operator
+ predicate:
+ expr: (sqrt(key) <> 13)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 2
+ value expressions:
+ expr: key
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -475,7 +483,7 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col4, _col5, _col8
Filter Operator
predicate:
- expr: ((((_col4 > '15') and (_col4 < '25')) and (sqrt(_col8)
<> 13)) and ((_col0 > '10') and (_col0 < '20')))
+ expr: (((_col4 > '15') and (_col4 < '25')) and ((_col0 > '10')
and (_col0 < '20')))
type: boolean
Select Operator
expressions:
@@ -514,7 +522,7 @@ PREHOOK: query: FROM
WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND
sqrt(c.key) <> 13
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output:
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-38-20_602_416360783321217123/-mr-10000
+PREHOOK: Output:
file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-31_17-04-10_850_6246519718607931090/-mr-10000
POSTHOOK: query: FROM
src a
LEFT OUTER JOIN
@@ -527,7 +535,7 @@ POSTHOOK: query: FROM
WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND
sqrt(c.key) <> 13
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output:
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-38-20_602_416360783321217123/-mr-10000
+POSTHOOK: Output:
file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-31_17-04-10_850_6246519718607931090/-mr-10000
150 val_150 150 val_150 150
152 val_152 152 val_152 152
152 val_152 152 val_152 152
Added: hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join5.q.out
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join5.q.out?rev=1164340&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join5.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join5.q.out Fri Sep
2 03:27:17 2011
@@ -0,0 +1,412 @@
+PREHOOK: query: create table t1 (id int, key string, value string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table t1 (id int, key string, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t1
+PREHOOK: query: create table t2 (id int, key string, value string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table t2 (id int, key string, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t2
+PREHOOK: query: create table t3 (id int, key string, value string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table t3 (id int, key string, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t3
+PREHOOK: query: create table t4 (id int, key string, value string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table t4 (id int, key string, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t4
+PREHOOK: query: explain select * from t1 full outer join t2 on t1.id=t2.id
join t3 on t2.id=t3.id where t3.id=20
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from t1 full outer join t2 on t1.id=t2.id
join t3 on t2.id=t3.id where t3.id=20
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_FULLOUTERJOIN (TOK_TABREF (TOK_TABNAME
t1)) (TOK_TABREF (TOK_TABNAME t2)) (= (. (TOK_TABLE_OR_COL t1) id) (.
(TOK_TABLE_OR_COL t2) id))) (TOK_TABREF (TOK_TABNAME t3)) (= (.
(TOK_TABLE_OR_COL t2) id) (. (TOK_TABLE_OR_COL t3) id)))) (TOK_INSERT
(TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR
TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL t3) id) 20))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ t1
+ TableScan
+ alias: t1
+ Reduce Output Operator
+ key expressions:
+ expr: id
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: id
+ type: int
+ tag: 0
+ value expressions:
+ expr: id
+ type: int
+ expr: key
+ type: string
+ expr: value
+ type: string
+ t2
+ TableScan
+ alias: t2
+ Reduce Output Operator
+ key expressions:
+ expr: id
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: id
+ type: int
+ tag: 1
+ value expressions:
+ expr: id
+ type: int
+ expr: key
+ type: string
+ expr: value
+ type: string
+ t3
+ TableScan
+ alias: t3
+ Filter Operator
+ predicate:
+ expr: (id = 20)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: id
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: id
+ type: int
+ tag: 2
+ value expressions:
+ expr: id
+ type: int
+ expr: key
+ type: string
+ expr: value
+ type: string
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Outer Join 0 to 1
+ Inner Join 1 to 2
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ 1 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ 2 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col10,
_col11, _col12
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ expr: _col5
+ type: int
+ expr: _col6
+ type: string
+ expr: _col7
+ type: string
+ expr: _col10
+ type: int
+ expr: _col11
+ type: string
+ expr: _col12
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5,
_col6, _col7, _col8
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: explain select * from t1 join t2 on (t1.id=t2.id) left outer
join t3 on (t2.id=t3.id) where t2.id=20
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from t1 join t2 on (t1.id=t2.id) left outer
join t3 on (t2.id=t3.id) where t2.id=20
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME
t1)) (TOK_TABREF (TOK_TABNAME t2)) (= (. (TOK_TABLE_OR_COL t1) id) (.
(TOK_TABLE_OR_COL t2) id))) (TOK_TABREF (TOK_TABNAME t3)) (= (.
(TOK_TABLE_OR_COL t2) id) (. (TOK_TABLE_OR_COL t3) id)))) (TOK_INSERT
(TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR
TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL t2) id) 20))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ t1
+ TableScan
+ alias: t1
+ Reduce Output Operator
+ key expressions:
+ expr: id
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: id
+ type: int
+ tag: 0
+ value expressions:
+ expr: id
+ type: int
+ expr: key
+ type: string
+ expr: value
+ type: string
+ t2
+ TableScan
+ alias: t2
+ Filter Operator
+ predicate:
+ expr: (id = 20)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: id
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: id
+ type: int
+ tag: 1
+ value expressions:
+ expr: id
+ type: int
+ expr: key
+ type: string
+ expr: value
+ type: string
+ t3
+ TableScan
+ alias: t3
+ Reduce Output Operator
+ key expressions:
+ expr: id
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: id
+ type: int
+ tag: 2
+ value expressions:
+ expr: id
+ type: int
+ expr: key
+ type: string
+ expr: value
+ type: string
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ Left Outer Join1 to 2
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ 1 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ 2 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col10,
_col11, _col12
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ expr: _col5
+ type: int
+ expr: _col6
+ type: string
+ expr: _col7
+ type: string
+ expr: _col10
+ type: int
+ expr: _col11
+ type: string
+ expr: _col12
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5,
_col6, _col7, _col8
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: explain select * from t1 join t2 on (t1.id=t2.id) left outer
join t3 on (t1.id=t3.id) where t2.id=20
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from t1 join t2 on (t1.id=t2.id) left outer
join t3 on (t1.id=t3.id) where t2.id=20
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME
t1)) (TOK_TABREF (TOK_TABNAME t2)) (= (. (TOK_TABLE_OR_COL t1) id) (.
(TOK_TABLE_OR_COL t2) id))) (TOK_TABREF (TOK_TABNAME t3)) (= (.
(TOK_TABLE_OR_COL t1) id) (. (TOK_TABLE_OR_COL t3) id)))) (TOK_INSERT
(TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR
TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL t2) id) 20))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ t1
+ TableScan
+ alias: t1
+ Reduce Output Operator
+ key expressions:
+ expr: id
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: id
+ type: int
+ tag: 0
+ value expressions:
+ expr: id
+ type: int
+ expr: key
+ type: string
+ expr: value
+ type: string
+ t2
+ TableScan
+ alias: t2
+ Filter Operator
+ predicate:
+ expr: (id = 20)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: id
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: id
+ type: int
+ tag: 1
+ value expressions:
+ expr: id
+ type: int
+ expr: key
+ type: string
+ expr: value
+ type: string
+ t3
+ TableScan
+ alias: t3
+ Reduce Output Operator
+ key expressions:
+ expr: id
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: id
+ type: int
+ tag: 2
+ value expressions:
+ expr: id
+ type: int
+ expr: key
+ type: string
+ expr: value
+ type: string
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ Left Outer Join0 to 2
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ 1 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ 2 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col10,
_col11, _col12
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ expr: _col5
+ type: int
+ expr: _col6
+ type: string
+ expr: _col7
+ type: string
+ expr: _col10
+ type: int
+ expr: _col11
+ type: string
+ expr: _col12
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5,
_col6, _col7, _col8
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: drop table t1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t1
+PREHOOK: Output: default@t1
+POSTHOOK: query: drop table t1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t1
+POSTHOOK: Output: default@t1
+PREHOOK: query: drop table t2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t2
+PREHOOK: Output: default@t2
+POSTHOOK: query: drop table t2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t2
+POSTHOOK: Output: default@t2
+PREHOOK: query: drop table t3
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t3
+PREHOOK: Output: default@t3
+POSTHOOK: query: drop table t3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t3
+POSTHOOK: Output: default@t3
+PREHOOK: query: drop table t4
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t4
+PREHOOK: Output: default@t4
+POSTHOOK: query: drop table t4
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t4
+POSTHOOK: Output: default@t4
Modified: hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml?rev=1164340&r1=1164339&r2=1164340&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml (original)
+++ hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml Fri Sep 2
03:27:17 2011
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<java version="1.6.0_01" class="java.beans.XMLDecoder">
+<java version="1.6.0_26" class="java.beans.XMLDecoder">
<object id="MapRedTask0" class="org.apache.hadoop.hive.ql.exec.MapRedTask">
<void property="childTasks">
<object class="java.util.ArrayList">
@@ -22,7 +22,7 @@
<void property="work">
<object class="org.apache.hadoop.hive.ql.plan.StatsWork">
<void property="aggKey">
-
<string>pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-08_392_4021619949206592501/-ext-10000/</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/scratchdir/hive_2011-08-31_21-37-54_282_5074339267564751737/-ext-10000/</string>
</void>
</object>
</void>
@@ -58,7 +58,7 @@
<boolean>true</boolean>
</void>
<void property="sourceDir">
-
<string>pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-08_392_4021619949206592501/-ext-10000</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/scratchdir/hive_2011-08-31_21-37-54_282_5074339267564751737/-ext-10000</string>
</void>
<void property="table">
<object id="TableDesc0"
class="org.apache.hadoop.hive.ql.plan.TableDesc">
@@ -111,11 +111,11 @@
</void>
<void method="put">
<string>location</string>
-
<string>pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/dest1</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1310382308</string>
+ <string>1314851874</string>
</void>
</object>
</void>
@@ -125,7 +125,7 @@
</object>
</void>
<void property="tmpDir">
-
<string>pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-08_392_4021619949206592501/-ext-10001</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/scratchdir/hive_2011-08-31_21-37-54_282_5074339267564751737/-ext-10001</string>
</void>
</object>
</void>
@@ -196,11 +196,11 @@
</void>
<void method="put">
<string>location</string>
-
<string>pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1310382307</string>
+ <string>1314851873</string>
</void>
</object>
</void>
@@ -258,11 +258,11 @@
</void>
<void method="put">
<string>location</string>
-
<string>pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1310382307</string>
+ <string>1314851873</string>
</void>
</object>
</void>
@@ -525,14 +525,7 @@
<void property="childExprs">
<object class="java.util.ArrayList">
<void method="add">
- <object
class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc">
- <void property="column">
- <string>_col0</string>
- </void>
- <void property="typeInfo">
- <object idref="PrimitiveTypeInfo0"/>
- </void>
- </object>
+ <object idref="ExprNodeColumnDesc1"/>
</void>
<void method="add">
<object
class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc">
@@ -991,7 +984,7 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
-
<string>pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
<object class="java.util.ArrayList">
<void method="add">
<string>tmap:src</string>
@@ -1003,7 +996,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
-
<string>pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
<object class="org.apache.hadoop.hive.ql.plan.PartitionDesc">
<void property="baseFileName">
<string>src</string>
@@ -1060,11 +1053,11 @@
</void>
<void method="put">
<string>location</string>
-
<string>pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1310382307</string>
+ <string>1314851873</string>
</void>
</object>
</void>
@@ -1122,11 +1115,11 @@
</void>
<void method="put">
<string>location</string>
-
<string>pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1310382307</string>
+ <string>1314851873</string>
</void>
</object>
</void>
@@ -1155,7 +1148,7 @@
<int>1</int>
</void>
<void property="dirName">
-
<string>pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-08_392_4021619949206592501/-ext-10000</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/scratchdir/hive_2011-08-31_21-37-54_282_5074339267564751737/-ext-10000</string>
</void>
<void property="gatherStats">
<boolean>true</boolean>
@@ -1164,7 +1157,7 @@
<int>1</int>
</void>
<void property="statsAggPrefix">
-
<string>pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-08_392_4021619949206592501/-ext-10000/</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/scratchdir/hive_2011-08-31_21-37-54_282_5074339267564751737/-ext-10000/</string>
</void>
<void property="tableInfo">
<object idref="TableDesc0"/>
Modified: hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml?rev=1164340&r1=1164339&r2=1164340&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml (original)
+++ hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml Fri Sep 2
03:27:17 2011
@@ -62,11 +62,11 @@
</void>
<void method="put">
<string>location</string>
-
<string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1314756952</string>
+ <string>1314851960</string>
</void>
</object>
</void>
@@ -124,11 +124,11 @@
</void>
<void method="put">
<string>location</string>
-
<string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1314756952</string>
+ <string>1314851960</string>
</void>
</object>
</void>
@@ -194,11 +194,11 @@
</void>
<void method="put">
<string>location</string>
-
<string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1314756952</string>
+ <string>1314851960</string>
</void>
</object>
</void>
@@ -256,11 +256,11 @@
</void>
<void method="put">
<string>location</string>
-
<string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1314756952</string>
+ <string>1314851960</string>
</void>
</object>
</void>
@@ -437,21 +437,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_RS_6_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_RS_493_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_RS_6_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_RS_493_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_RS_6_TIME_TAKEN</string>
+ <string>CNTR_NAME_RS_493_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_RS_6_FATAL_ERROR</string>
+ <string>CNTR_NAME_RS_493_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>RS_6</string>
+ <string>RS_493</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -557,21 +557,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_SEL_5_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_492_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_5_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_492_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_5_TIME_TAKEN</string>
+ <string>CNTR_NAME_SEL_492_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_5_FATAL_ERROR</string>
+ <string>CNTR_NAME_SEL_492_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>SEL_5</string>
+ <string>SEL_492</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -756,21 +756,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_FIL_14_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_FIL_501_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_14_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_FIL_501_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_14_TIME_TAKEN</string>
+ <string>CNTR_NAME_FIL_501_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_14_FATAL_ERROR</string>
+ <string>CNTR_NAME_FIL_501_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>FIL_14</string>
+ <string>FIL_501</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -866,16 +866,16 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_TS_3_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_TS_490_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_TS_3_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_TS_490_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_TS_3_TIME_TAKEN</string>
+ <string>CNTR_NAME_TS_490_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_TS_3_FATAL_ERROR</string>
+ <string>CNTR_NAME_TS_490_FATAL_ERROR</string>
</void>
</object>
</void>
@@ -890,7 +890,7 @@
</object>
</void>
<void property="operatorId">
- <string>TS_3</string>
+ <string>TS_490</string>
</void>
<void property="schema">
<object class="org.apache.hadoop.hive.ql.exec.RowSchema">
@@ -1063,21 +1063,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_RS_7_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_RS_494_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_RS_7_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_RS_494_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_RS_7_TIME_TAKEN</string>
+ <string>CNTR_NAME_RS_494_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_RS_7_FATAL_ERROR</string>
+ <string>CNTR_NAME_RS_494_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>RS_7</string>
+ <string>RS_494</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -1183,21 +1183,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_SEL_2_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_489_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_489_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_2_TIME_TAKEN</string>
+ <string>CNTR_NAME_SEL_489_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_2_FATAL_ERROR</string>
+ <string>CNTR_NAME_SEL_489_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>SEL_2</string>
+ <string>SEL_489</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -1333,21 +1333,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_FIL_15_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_FIL_502_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_15_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_FIL_502_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_15_TIME_TAKEN</string>
+ <string>CNTR_NAME_FIL_502_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_15_FATAL_ERROR</string>
+ <string>CNTR_NAME_FIL_502_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>FIL_15</string>
+ <string>FIL_502</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -1439,16 +1439,16 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_TS_0_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_TS_487_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_TS_0_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_TS_487_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_TS_0_TIME_TAKEN</string>
+ <string>CNTR_NAME_TS_487_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_TS_0_FATAL_ERROR</string>
+ <string>CNTR_NAME_TS_487_FATAL_ERROR</string>
</void>
</object>
</void>
@@ -1463,7 +1463,7 @@
</object>
</void>
<void property="operatorId">
- <string>TS_0</string>
+ <string>TS_487</string>
</void>
<void property="schema">
<object class="org.apache.hadoop.hive.ql.exec.RowSchema">
@@ -1491,7 +1491,7 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
-
<string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
<object class="java.util.ArrayList">
<void method="add">
<string>c:a:src1</string>
@@ -1506,7 +1506,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
-
<string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
<object class="org.apache.hadoop.hive.ql.plan.PartitionDesc">
<void property="baseFileName">
<string>src</string>
@@ -1563,11 +1563,11 @@
</void>
<void method="put">
<string>location</string>
-
<string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1314756952</string>
+ <string>1314851960</string>
</void>
</object>
</void>
@@ -1625,11 +1625,11 @@
</void>
<void method="put">
<string>location</string>
-
<string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
+
<string>pfile:/Users/charleschen/hive-trunk-h/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1314756952</string>
+ <string>1314851960</string>
</void>
</object>
</void>
@@ -1663,13 +1663,13 @@
<void property="conf">
<object
class="org.apache.hadoop.hive.ql.plan.FileSinkDesc">
<void property="dirName">
-
<string>file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-15-53_467_7462858773918170539/-ext-10001</string>
+
<string>file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-31_21-39-21_672_6916679709209442717/-ext-10001</string>
</void>
<void property="numFiles">
<int>1</int>
</void>
<void property="statsAggPrefix">
-
<string>file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-15-53_467_7462858773918170539/-ext-10001/</string>
+
<string>file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-31_21-39-21_672_6916679709209442717/-ext-10001/</string>
</void>
<void property="tableInfo">
<object
class="org.apache.hadoop.hive.ql.plan.TableDesc">
@@ -1708,21 +1708,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_FS_12_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_FS_499_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FS_12_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_FS_499_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FS_12_TIME_TAKEN</string>
+ <string>CNTR_NAME_FS_499_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_FS_12_FATAL_ERROR</string>
+ <string>CNTR_NAME_FS_499_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>FS_12</string>
+ <string>FS_499</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -1882,21 +1882,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_SEL_11_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_498_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_11_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_498_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_11_TIME_TAKEN</string>
+ <string>CNTR_NAME_SEL_498_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_11_FATAL_ERROR</string>
+ <string>CNTR_NAME_SEL_498_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>SEL_11</string>
+ <string>SEL_498</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -2080,21 +2080,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_SEL_9_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_496_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_9_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_496_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_9_TIME_TAKEN</string>
+ <string>CNTR_NAME_SEL_496_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_9_FATAL_ERROR</string>
+ <string>CNTR_NAME_SEL_496_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>SEL_9</string>
+ <string>SEL_496</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -2179,21 +2179,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_FIL_13_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_FIL_500_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_13_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_FIL_500_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_13_TIME_TAKEN</string>
+ <string>CNTR_NAME_FIL_500_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_13_FATAL_ERROR</string>
+ <string>CNTR_NAME_FIL_500_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>FIL_13</string>
+ <string>FIL_500</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -2431,21 +2431,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_JOIN_8_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_JOIN_495_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_JOIN_8_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_JOIN_495_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_JOIN_8_TIME_TAKEN</string>
+ <string>CNTR_NAME_JOIN_495_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_JOIN_8_FATAL_ERROR</string>
+ <string>CNTR_NAME_JOIN_495_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>JOIN_8</string>
+ <string>JOIN_495</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">