Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join5.q.xml URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join5.q.xml?rev=712905&r1=712904&r2=712905&view=diff ============================================================================== --- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join5.q.xml (original) +++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join5.q.xml Mon Nov 10 17:50:06 2008 @@ -1,104 +1,6 @@ <?xml version="1.0" encoding="UTF-8"?> <java version="1.6.0_07" class="java.beans.XMLDecoder"> - <object id="MapRedTask0" class="org.apache.hadoop.hive.ql.exec.MapRedTask"> - <void property="childTasks"> - <object class="java.util.ArrayList"> - <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.MoveTask"> - <void property="id"> - <string>Stage-1</string> - </void> - <void property="parentTasks"> - <object class="java.util.ArrayList"> - <void method="add"> - <object idref="MapRedTask0"/> - </void> - </object> - </void> - <void property="work"> - <object class="org.apache.hadoop.hive.ql.plan.moveWork"> - <void property="loadFileWork"> - <object class="java.util.ArrayList"/> - </void> - <void property="loadTableWork"> - <object class="java.util.ArrayList"> - <void method="add"> - <object class="org.apache.hadoop.hive.ql.plan.loadTableDesc"> - <void property="partitionSpec"> - <object class="java.util.HashMap"/> - </void> - <void property="replace"> - <boolean>true</boolean> - </void> - <void property="sourceDir"> - <string>/tmp/hive-zshao/936351131/313796179.10000.insclause-0</string> - </void> - <void property="table"> - <object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc"> - <void property="deserializerClass"> - <class>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</class> - </void> - <void property="inputFileFormatClass"> - <class>org.apache.hadoop.mapred.TextInputFormat</class> - </void> - <void property="outputFileFormatClass"> - <class>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</class> - </void> - <void property="properties"> - <object class="java.util.Properties"> - <void method="put"> - <string>name</string> - <string>dest1</string> - </void> - <void method="put"> - <string>serialization.ddl</string> - <string>struct dest1 { string key, string value}</string> - </void> - <void method="put"> - <string>serialization.format</string> - <string>1</string> - </void> - <void method="put"> - <string>columns</string> - <string>key,value</string> - </void> - <void method="put"> - <string>bucket_count</string> - <string>-1</string> - </void> - <void method="put"> - <string>serialization.lib</string> - <string>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</string> - </void> - <void method="put"> - <string>file.inputformat</string> - <string>org.apache.hadoop.mapred.TextInputFormat</string> - </void> - <void method="put"> - <string>file.outputformat</string> - <string>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</string> - </void> - <void method="put"> - <string>location</string> - <string>file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1</string> - </void> - </object> - </void> - <void property="serdeClassName"> - <string>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</string> - </void> - </object> - </void> - </object> - </void> - </object> - </void> - </object> - </void> - </object> - 
</void> - </object> - </void> + <object class="org.apache.hadoop.hive.ql.exec.MapRedTask"> <void property="id"> <string>Stage-2</string> </void> @@ -108,23 +10,23 @@ <object class="java.util.HashMap"> <void method="put"> <string>c:b:src2</string> - <object class="org.apache.hadoop.hive.ql.exec.ForwardOperator"> + <object id="ForwardOperator0" class="org.apache.hadoop.hive.ql.exec.ForwardOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.FilterOperator"> + <object id="FilterOperator0" class="org.apache.hadoop.hive.ql.exec.FilterOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.SelectOperator"> + <object id="SelectOperator0" class="org.apache.hadoop.hive.ql.exec.SelectOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.ReduceSinkOperator"> + <object id="ReduceSinkOperator0" class="org.apache.hadoop.hive.ql.exec.ReduceSinkOperator"> <void property="conf"> <object class="org.apache.hadoop.hive.ql.plan.reduceSinkDesc"> <void property="keyCols"> - <object class="java.util.ArrayList"> + <object id="ArrayList0" class="java.util.ArrayList"> <void method="add"> <object class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> <void property="column"> @@ -142,36 +44,40 @@ </object> </void> <void property="keySerializeInfo"> - <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> + <object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc"> <void property="deserializerClass"> - <class>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</class> + <class>org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe</class> </void> <void property="inputFileFormatClass"> - <class>org.apache.hadoop.mapred.TextInputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileInputFormat</class> </void> <void property="outputFileFormatClass"> - <class>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileOutputFormat</class> </void> <void property="properties"> <object class="java.util.Properties"> <void method="put"> - <string>columns</string> - <string>0</string> + <string>name</string> + <string>binary_sortable_table</string> + </void> + <void method="put"> + <string>serialization.ddl</string> + <string>struct binary_sortable_table { string joinkey0}</string> </void> <void method="put"> <string>serialization.format</string> - <string>1</string> + <string>org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol</string> </void> </object> </void> </object> </void> - <void property="numPartitionFields"> - <int>1</int> - </void> <void property="numReducers"> <int>-1</int> </void> + <void property="partitionCols"> + <object idref="ArrayList0"/> + </void> <void property="tag"> <int>1</int> </void> @@ -200,25 +106,29 @@ </object> </void> <void property="valueSerializeInfo"> - <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> + <object id="tableDesc1" class="org.apache.hadoop.hive.ql.plan.tableDesc"> <void property="deserializerClass"> - <class>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</class> + <class>org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe</class> </void> <void property="inputFileFormatClass"> - <class>org.apache.hadoop.mapred.TextInputFormat</class> + 
<class>org.apache.hadoop.mapred.SequenceFileInputFormat</class> </void> <void property="outputFileFormatClass"> - <class>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileOutputFormat</class> </void> <void property="properties"> <object class="java.util.Properties"> <void method="put"> - <string>columns</string> - <string>0,1</string> + <string>name</string> + <string>binary_table</string> + </void> + <void method="put"> + <string>serialization.ddl</string> + <string>struct binary_table { string reducesinkvalue0, string reducesinkvalue1}</string> </void> <void method="put"> <string>serialization.format</string> - <string>1</string> + <string>com.facebook.thrift.protocol.TBinaryProtocol</string> </void> </object> </void> @@ -226,6 +136,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="SelectOperator0"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -286,6 +203,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="FilterOperator0"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -451,6 +375,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="ForwardOperator0"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -494,23 +425,23 @@ </void> <void method="put"> <string>c:a:src1</string> - <object class="org.apache.hadoop.hive.ql.exec.ForwardOperator"> + <object id="ForwardOperator1" class="org.apache.hadoop.hive.ql.exec.ForwardOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.FilterOperator"> + <object id="FilterOperator1" class="org.apache.hadoop.hive.ql.exec.FilterOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.SelectOperator"> + <object id="SelectOperator1" class="org.apache.hadoop.hive.ql.exec.SelectOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.ReduceSinkOperator"> + <object id="ReduceSinkOperator1" class="org.apache.hadoop.hive.ql.exec.ReduceSinkOperator"> <void property="conf"> <object class="org.apache.hadoop.hive.ql.plan.reduceSinkDesc"> <void property="keyCols"> - <object class="java.util.ArrayList"> + <object id="ArrayList1" class="java.util.ArrayList"> <void method="add"> <object class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> <void property="column"> @@ -526,34 +457,38 @@ <void property="keySerializeInfo"> <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> <void property="deserializerClass"> - <class>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</class> + <class>org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe</class> </void> <void property="inputFileFormatClass"> - <class>org.apache.hadoop.mapred.TextInputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileInputFormat</class> </void> <void 
property="outputFileFormatClass"> - <class>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileOutputFormat</class> </void> <void property="properties"> <object class="java.util.Properties"> <void method="put"> - <string>columns</string> - <string>0</string> + <string>name</string> + <string>binary_sortable_table</string> + </void> + <void method="put"> + <string>serialization.ddl</string> + <string>struct binary_sortable_table { string joinkey0}</string> </void> <void method="put"> <string>serialization.format</string> - <string>1</string> + <string>org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol</string> </void> </object> </void> </object> </void> - <void property="numPartitionFields"> - <int>1</int> - </void> <void property="numReducers"> <int>-1</int> </void> + <void property="partitionCols"> + <object idref="ArrayList1"/> + </void> <void property="valueCols"> <object class="java.util.ArrayList"> <void method="add"> @@ -579,25 +514,29 @@ </object> </void> <void property="valueSerializeInfo"> - <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> + <object id="tableDesc2" class="org.apache.hadoop.hive.ql.plan.tableDesc"> <void property="deserializerClass"> - <class>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</class> + <class>org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe</class> </void> <void property="inputFileFormatClass"> - <class>org.apache.hadoop.mapred.TextInputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileInputFormat</class> </void> <void property="outputFileFormatClass"> - <class>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileOutputFormat</class> </void> <void property="properties"> <object class="java.util.Properties"> <void method="put"> - <string>columns</string> - <string>0,1</string> + <string>name</string> + <string>binary_table</string> + </void> + <void method="put"> + <string>serialization.ddl</string> + <string>struct binary_table { string reducesinkvalue0, string reducesinkvalue1}</string> </void> <void method="put"> <string>serialization.format</string> - <string>1</string> + <string>com.facebook.thrift.protocol.TBinaryProtocol</string> </void> </object> </void> @@ -605,6 +544,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="SelectOperator1"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -665,6 +611,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="FilterOperator1"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -822,6 +775,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="ForwardOperator1"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -865,13 +825,16 @@ </void> </object> </void> + <void property="keyDesc"> + <object idref="tableDesc0"/> + </void> <void property="needsTagging"> <boolean>true</boolean> </void> <void property="pathToAliases"> <object class="java.util.LinkedHashMap"> <void method="put"> - 
<string>file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src</string> + <string>file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src</string> <object class="java.util.ArrayList"> <void method="add"> <string>c:a:src1</string> @@ -886,7 +849,7 @@ <void property="pathToPartitionInfo"> <object class="java.util.LinkedHashMap"> <void method="put"> - <string>file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src</string> + <string>file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src</string> <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> <void property="partSpec"> <object class="java.util.LinkedHashMap"/> @@ -938,7 +901,7 @@ </void> <void method="put"> <string>location</string> - <string>file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src</string> + <string>file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src</string> </void> </object> </void> @@ -952,15 +915,15 @@ </object> </void> <void property="reducer"> - <object class="org.apache.hadoop.hive.ql.exec.JoinOperator"> + <object id="JoinOperator0" class="org.apache.hadoop.hive.ql.exec.JoinOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.SelectOperator"> + <object id="SelectOperator2" class="org.apache.hadoop.hive.ql.exec.SelectOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.SelectOperator"> + <object id="SelectOperator3" class="org.apache.hadoop.hive.ql.exec.SelectOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> @@ -968,10 +931,39 @@ <void property="conf"> <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> <void property="dirName"> - <string>/tmp/hive-zshao/936351131/313796179.10000.insclause-0</string> + <string>/tmp/hive-njain/198286603/204778341.10001.insclause-0</string> </void> <void property="tableInfo"> - <object idref="tableDesc0"/> + <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> + <void property="deserializerClass"> + <class>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</class> + </void> + <void property="inputFileFormatClass"> + <class>org.apache.hadoop.mapred.TextInputFormat</class> + </void> + <void property="outputFileFormatClass"> + <class>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</class> + </void> + <void property="properties"> + <object class="java.util.Properties"> + <void method="put"> + <string>columns</string> + <string>c1,c2,c3,c4</string> + </void> + <void method="put"> + <string>serialization.format</string> + <string>1</string> + </void> + </object> + </void> + </object> + </void> + </object> + </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="SelectOperator3"/> </void> </object> </void> @@ -1075,6 +1067,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="SelectOperator2"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -1134,6 +1133,13 @@ </void> 
</object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="JoinOperator0"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -1258,6 +1264,16 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="ReduceSinkOperator1"/> + </void> + <void method="add"> + <object idref="ReduceSinkOperator0"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -1308,6 +1324,16 @@ </void> </object> </void> + <void property="tagToValueDesc"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="tableDesc2"/> + </void> + <void method="add"> + <object idref="tableDesc1"/> + </void> + </object> + </void> </object> </void> </object>
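
For context, the golden plan files above are plain java.beans.XMLDecoder streams (note the <java version="1.6.0_07" class="java.beans.XMLDecoder"> header at the top of the diff), so a regenerated plan can be read back with the standard JDK API alone. A minimal sketch, assuming only a local path to one of these files; the class name and path below are illustrative and not part of this commit:

    import java.beans.XMLDecoder;
    import java.io.BufferedInputStream;
    import java.io.FileInputStream;
    import java.io.FileNotFoundException;

    public class DecodePlan {
      public static void main(String[] args) throws FileNotFoundException {
        // Hypothetical local path; point it at either golden file touched here.
        String path = args.length > 0 ? args[0] : "join5.q.xml";
        XMLDecoder decoder =
            new XMLDecoder(new BufferedInputStream(new FileInputStream(path)));
        try {
          // After this change the top-level object in the file is the Stage-2
          // MapRedTask itself; previously it was a MapRedTask carrying a
          // Stage-1 MoveTask in its childTasks list.
          Object task = decoder.readObject();
          System.out.println("Decoded: " + task.getClass().getName());
        } finally {
          decoder.close();
        }
      }
    }
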
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join6.q.xml URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join6.q.xml?rev=712905&r1=712904&r2=712905&view=diff ============================================================================== --- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join6.q.xml (original) +++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join6.q.xml Mon Nov 10 17:50:06 2008 @@ -1,104 +1,6 @@ <?xml version="1.0" encoding="UTF-8"?> <java version="1.6.0_07" class="java.beans.XMLDecoder"> - <object id="MapRedTask0" class="org.apache.hadoop.hive.ql.exec.MapRedTask"> - <void property="childTasks"> - <object class="java.util.ArrayList"> - <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.MoveTask"> - <void property="id"> - <string>Stage-1</string> - </void> - <void property="parentTasks"> - <object class="java.util.ArrayList"> - <void method="add"> - <object idref="MapRedTask0"/> - </void> - </object> - </void> - <void property="work"> - <object class="org.apache.hadoop.hive.ql.plan.moveWork"> - <void property="loadFileWork"> - <object class="java.util.ArrayList"/> - </void> - <void property="loadTableWork"> - <object class="java.util.ArrayList"> - <void method="add"> - <object class="org.apache.hadoop.hive.ql.plan.loadTableDesc"> - <void property="partitionSpec"> - <object class="java.util.HashMap"/> - </void> - <void property="replace"> - <boolean>true</boolean> - </void> - <void property="sourceDir"> - <string>/tmp/hive-zshao/161126421/564619381.10000.insclause-0</string> - </void> - <void property="table"> - <object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc"> - <void property="deserializerClass"> - <class>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</class> - </void> - <void property="inputFileFormatClass"> - <class>org.apache.hadoop.mapred.TextInputFormat</class> - </void> - <void property="outputFileFormatClass"> - <class>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</class> - </void> - <void property="properties"> - <object class="java.util.Properties"> - <void method="put"> - <string>name</string> - <string>dest1</string> - </void> - <void method="put"> - <string>serialization.ddl</string> - <string>struct dest1 { string key, string value}</string> - </void> - <void method="put"> - <string>serialization.format</string> - <string>1</string> - </void> - <void method="put"> - <string>columns</string> - <string>key,value</string> - </void> - <void method="put"> - <string>bucket_count</string> - <string>-1</string> - </void> - <void method="put"> - <string>serialization.lib</string> - <string>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</string> - </void> - <void method="put"> - <string>file.inputformat</string> - <string>org.apache.hadoop.mapred.TextInputFormat</string> - </void> - <void method="put"> - <string>file.outputformat</string> - <string>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</string> - </void> - <void method="put"> - <string>location</string> - <string>file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1</string> - </void> - </object> - </void> - <void property="serdeClassName"> - <string>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</string> - </void> - </object> - </void> - </object> - </void> - </object> - </void> - </object> - </void> - </object> - 
</void> - </object> - </void> + <object class="org.apache.hadoop.hive.ql.exec.MapRedTask"> <void property="id"> <string>Stage-2</string> </void> @@ -108,23 +10,23 @@ <object class="java.util.HashMap"> <void method="put"> <string>c:b:src2</string> - <object class="org.apache.hadoop.hive.ql.exec.ForwardOperator"> + <object id="ForwardOperator0" class="org.apache.hadoop.hive.ql.exec.ForwardOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.FilterOperator"> + <object id="FilterOperator0" class="org.apache.hadoop.hive.ql.exec.FilterOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.SelectOperator"> + <object id="SelectOperator0" class="org.apache.hadoop.hive.ql.exec.SelectOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.ReduceSinkOperator"> + <object id="ReduceSinkOperator0" class="org.apache.hadoop.hive.ql.exec.ReduceSinkOperator"> <void property="conf"> <object class="org.apache.hadoop.hive.ql.plan.reduceSinkDesc"> <void property="keyCols"> - <object class="java.util.ArrayList"> + <object id="ArrayList0" class="java.util.ArrayList"> <void method="add"> <object class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> <void property="column"> @@ -142,36 +44,40 @@ </object> </void> <void property="keySerializeInfo"> - <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> + <object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc"> <void property="deserializerClass"> - <class>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</class> + <class>org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe</class> </void> <void property="inputFileFormatClass"> - <class>org.apache.hadoop.mapred.TextInputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileInputFormat</class> </void> <void property="outputFileFormatClass"> - <class>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileOutputFormat</class> </void> <void property="properties"> <object class="java.util.Properties"> <void method="put"> - <string>columns</string> - <string>0</string> + <string>name</string> + <string>binary_sortable_table</string> + </void> + <void method="put"> + <string>serialization.ddl</string> + <string>struct binary_sortable_table { string joinkey0}</string> </void> <void method="put"> <string>serialization.format</string> - <string>1</string> + <string>org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol</string> </void> </object> </void> </object> </void> - <void property="numPartitionFields"> - <int>1</int> - </void> <void property="numReducers"> <int>-1</int> </void> + <void property="partitionCols"> + <object idref="ArrayList0"/> + </void> <void property="tag"> <int>1</int> </void> @@ -200,25 +106,29 @@ </object> </void> <void property="valueSerializeInfo"> - <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> + <object id="tableDesc1" class="org.apache.hadoop.hive.ql.plan.tableDesc"> <void property="deserializerClass"> - <class>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</class> + <class>org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe</class> </void> <void property="inputFileFormatClass"> - <class>org.apache.hadoop.mapred.TextInputFormat</class> + 
<class>org.apache.hadoop.mapred.SequenceFileInputFormat</class> </void> <void property="outputFileFormatClass"> - <class>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileOutputFormat</class> </void> <void property="properties"> <object class="java.util.Properties"> <void method="put"> - <string>columns</string> - <string>0,1</string> + <string>name</string> + <string>binary_table</string> + </void> + <void method="put"> + <string>serialization.ddl</string> + <string>struct binary_table { string reducesinkvalue0, string reducesinkvalue1}</string> </void> <void method="put"> <string>serialization.format</string> - <string>1</string> + <string>com.facebook.thrift.protocol.TBinaryProtocol</string> </void> </object> </void> @@ -226,6 +136,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="SelectOperator0"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -286,6 +203,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="FilterOperator0"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -451,6 +375,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="ForwardOperator0"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -494,23 +425,23 @@ </void> <void method="put"> <string>c:a:src1</string> - <object class="org.apache.hadoop.hive.ql.exec.ForwardOperator"> + <object id="ForwardOperator1" class="org.apache.hadoop.hive.ql.exec.ForwardOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.FilterOperator"> + <object id="FilterOperator1" class="org.apache.hadoop.hive.ql.exec.FilterOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.SelectOperator"> + <object id="SelectOperator1" class="org.apache.hadoop.hive.ql.exec.SelectOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.ReduceSinkOperator"> + <object id="ReduceSinkOperator1" class="org.apache.hadoop.hive.ql.exec.ReduceSinkOperator"> <void property="conf"> <object class="org.apache.hadoop.hive.ql.plan.reduceSinkDesc"> <void property="keyCols"> - <object class="java.util.ArrayList"> + <object id="ArrayList1" class="java.util.ArrayList"> <void method="add"> <object class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> <void property="column"> @@ -526,34 +457,38 @@ <void property="keySerializeInfo"> <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> <void property="deserializerClass"> - <class>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</class> + <class>org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe</class> </void> <void property="inputFileFormatClass"> - <class>org.apache.hadoop.mapred.TextInputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileInputFormat</class> </void> <void 
property="outputFileFormatClass"> - <class>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileOutputFormat</class> </void> <void property="properties"> <object class="java.util.Properties"> <void method="put"> - <string>columns</string> - <string>0</string> + <string>name</string> + <string>binary_sortable_table</string> + </void> + <void method="put"> + <string>serialization.ddl</string> + <string>struct binary_sortable_table { string joinkey0}</string> </void> <void method="put"> <string>serialization.format</string> - <string>1</string> + <string>org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol</string> </void> </object> </void> </object> </void> - <void property="numPartitionFields"> - <int>1</int> - </void> <void property="numReducers"> <int>-1</int> </void> + <void property="partitionCols"> + <object idref="ArrayList1"/> + </void> <void property="valueCols"> <object class="java.util.ArrayList"> <void method="add"> @@ -579,25 +514,29 @@ </object> </void> <void property="valueSerializeInfo"> - <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> + <object id="tableDesc2" class="org.apache.hadoop.hive.ql.plan.tableDesc"> <void property="deserializerClass"> - <class>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</class> + <class>org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe</class> </void> <void property="inputFileFormatClass"> - <class>org.apache.hadoop.mapred.TextInputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileInputFormat</class> </void> <void property="outputFileFormatClass"> - <class>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</class> + <class>org.apache.hadoop.mapred.SequenceFileOutputFormat</class> </void> <void property="properties"> <object class="java.util.Properties"> <void method="put"> - <string>columns</string> - <string>0,1</string> + <string>name</string> + <string>binary_table</string> + </void> + <void method="put"> + <string>serialization.ddl</string> + <string>struct binary_table { string reducesinkvalue0, string reducesinkvalue1}</string> </void> <void method="put"> <string>serialization.format</string> - <string>1</string> + <string>com.facebook.thrift.protocol.TBinaryProtocol</string> </void> </object> </void> @@ -605,6 +544,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="SelectOperator1"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -665,6 +611,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="FilterOperator1"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -822,6 +775,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="ForwardOperator1"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -865,13 +825,16 @@ </void> </object> </void> + <void property="keyDesc"> + <object idref="tableDesc0"/> + </void> <void property="needsTagging"> <boolean>true</boolean> </void> <void property="pathToAliases"> <object class="java.util.LinkedHashMap"> <void method="put"> - 
<string>file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src</string> + <string>file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src</string> <object class="java.util.ArrayList"> <void method="add"> <string>c:a:src1</string> @@ -886,7 +849,7 @@ <void property="pathToPartitionInfo"> <object class="java.util.LinkedHashMap"> <void method="put"> - <string>file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src</string> + <string>file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src</string> <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> <void property="partSpec"> <object class="java.util.LinkedHashMap"/> @@ -938,7 +901,7 @@ </void> <void method="put"> <string>location</string> - <string>file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src</string> + <string>file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src</string> </void> </object> </void> @@ -952,15 +915,15 @@ </object> </void> <void property="reducer"> - <object class="org.apache.hadoop.hive.ql.exec.JoinOperator"> + <object id="JoinOperator0" class="org.apache.hadoop.hive.ql.exec.JoinOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.SelectOperator"> + <object id="SelectOperator2" class="org.apache.hadoop.hive.ql.exec.SelectOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> - <object class="org.apache.hadoop.hive.ql.exec.SelectOperator"> + <object id="SelectOperator3" class="org.apache.hadoop.hive.ql.exec.SelectOperator"> <void property="childOperators"> <object class="java.util.ArrayList"> <void method="add"> @@ -968,10 +931,39 @@ <void property="conf"> <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> <void property="dirName"> - <string>/tmp/hive-zshao/161126421/564619381.10000.insclause-0</string> + <string>/tmp/hive-njain/113474329/1108575385.10001.insclause-0</string> </void> <void property="tableInfo"> - <object idref="tableDesc0"/> + <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> + <void property="deserializerClass"> + <class>org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe</class> + </void> + <void property="inputFileFormatClass"> + <class>org.apache.hadoop.mapred.TextInputFormat</class> + </void> + <void property="outputFileFormatClass"> + <class>org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat</class> + </void> + <void property="properties"> + <object class="java.util.Properties"> + <void method="put"> + <string>columns</string> + <string>c1,c2,c3,c4</string> + </void> + <void method="put"> + <string>serialization.format</string> + <string>1</string> + </void> + </object> + </void> + </object> + </void> + </object> + </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="SelectOperator3"/> </void> </object> </void> @@ -1075,6 +1067,13 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="SelectOperator2"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -1134,6 +1133,13 @@ </void> 
</object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="JoinOperator0"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -1258,6 +1264,16 @@ </void> </object> </void> + <void property="parentOperators"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="ReduceSinkOperator1"/> + </void> + <void method="add"> + <object idref="ReduceSinkOperator0"/> + </void> + </object> + </void> <void property="schema"> <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> <void property="signature"> @@ -1308,6 +1324,16 @@ </void> </object> </void> + <void property="tagToValueDesc"> + <object class="java.util.ArrayList"> + <void method="add"> + <object idref="tableDesc2"/> + </void> + <void method="add"> + <object idref="tableDesc1"/> + </void> + </object> + </void> </object> </void> </object>
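
A note on the id/idref pairs that appear throughout both diffs (partitionCols pointing at idref="ArrayList0"/"ArrayList1", keyDesc at tableDesc0, tagToValueDesc at tableDesc2 and tableDesc1): java.beans.XMLEncoder assigns an id the first time it writes an object and emits an idref for every later reference, so those properties reuse the very same instances as keyCols and the two valueSerializeInfo descriptors rather than copies. A minimal sketch of that mechanism using only JDK types; the SharedListBean class is purely illustrative and is not a Hive class:

    import java.beans.XMLEncoder;
    import java.io.ByteArrayOutputStream;
    import java.util.ArrayList;
    import java.util.List;

    public class IdRefDemo {
      // Illustrative bean: two properties deliberately hold the same list,
      // mirroring how keyCols and partitionCols share one ArrayList in the plan.
      public static class SharedListBean {
        private List<String> keyCols;
        private List<String> partitionCols;
        public List<String> getKeyCols() { return keyCols; }
        public void setKeyCols(List<String> l) { keyCols = l; }
        public List<String> getPartitionCols() { return partitionCols; }
        public void setPartitionCols(List<String> l) { partitionCols = l; }
      }

      public static void main(String[] args) {
        List<String> cols = new ArrayList<String>();
        cols.add("joinkey0");
        SharedListBean bean = new SharedListBean();
        bean.setKeyCols(cols);
        bean.setPartitionCols(cols); // same instance on purpose
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        XMLEncoder encoder = new XMLEncoder(out);
        encoder.writeObject(bean);
        encoder.close();
        // The first occurrence of the list gets an id attribute; the second
        // property is written as <object idref="..."/>, just like
        // partitionCols and tagToValueDesc in the new golden plans.
        System.out.println(out.toString());
      }
    }
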
